aiohttp.get

Here are the examples of the python api aiohttp.get taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

29 Examples

Example 1

Project: Jumper-Cogs
License: View license
Source File: pokedex.py
    async def _evolution_pokedex(self, pokemon):
        """Show a pokemon's evolution chain
        Example !pokedex evolution bulbasaur"""
        # Catch for when there is no input at all.
        if len(pokemon) > 0:
            url = "http://pokemondb.net/pokedex/" + str(pokemon)
            try:
                # try now also covers the request itself, so network
                # failures get the friendly message instead of raising.
                async with aiohttp.get(url) as response:
                    soup = BeautifulSoup(await response.text(), "html.parser")
                    # The evolution chain lives in this pokemondb.net div;
                    # .text raises AttributeError when the div is absent.
                    div = soup.find('div', attrs={'class':
                                                  'infocard-evo-list'})
                    evo = str(div.text.strip())
                    await self.bot.say("```" + evo + "```")
            except Exception:
                # Narrowed from a bare except so cancellation is not
                # swallowed; a missing page/div means no evolution chain.
                await self.bot.say(str(pokemon) +
                                   " does not have an evolution chain")
        else:
            await self.bot.say("Please input a pokemon name.")

Example 2

Project: Jumper-Cogs
License: View license
Source File: tibia.py
    async def _online_tibia(self):
        """Get total players playing"""
        url = "http://www.tibia.com/community/?subtopic=worlds"
        try:
            async with aiohttp.get(url) as response:
                soup = BeautifulSoup(await response.text(), "html.parser")
                # The counter is nested inside the right artwork box.
                artwork = soup.find('div', attrs={'id': 'RightArtwork'})
                online = artwork.find('div', attrs={'id': 'PlayersOnline'})
                # Strip the label so only the number remains.
                count = online.get_text().replace("Players Online", "")
                await self.bot.say("Players currently playing Tibia: " + count)
        except Exception:
            # Narrowed from a bare except; "retrieve" typo fixed.
            await self.bot.say("Could not retrieve data. The webserver may be offline.")

Example 3

Project: Jumper-Cogs
License: View license
Source File: tibia.py
    async def _server_tibia(self, servername):
        """Get Server Info"""
        # tibia.com world names are capitalized.
        servername = servername.title()
        url = "https://secure.tibia.com/community/?subtopic=worlds&world=" + str(servername)
        try:
            async with aiohttp.get(url) as response:
                soup = BeautifulSoup(await response.text(), "html5lib")
                tables = soup.find_all("table", attrs={'class': 'Table1'})
                # Cell text alternates category / value down the column,
                # so the even and odd slices pair up row-wise.
                cells = [td.get_text()
                         for td in tables[1].tbody.div.find_all('td')]
                zipped = list(zip(cells[::2], cells[1::2]))
                t = tabulate(zipped, headers=["Category", "Info"])
                await self.bot.say("```Python" + "\n" + str(t) + "```")
        except Exception:
            # Narrowed from a bare except; "retrieve" typo fixed.
            await self.bot.say("Unable to retrieve server data. The webserver may be offline.")

Example 4

Project: aiotg
License: View license
Source File: bot.py
    def download_file(self, file_path, range=None):
        """
        Download a file from Telegram servers
        """
        # Build the file endpoint URL for this bot token.
        url = "{0}/file/bot{1}/{2}".format(API_URL, self.api_token, file_path)
        # Forward a Range header only when the caller asked for one.
        if range:
            headers = {"range": range}
        else:
            headers = None
        return aiohttp.get(url, headers=headers)

Example 5

Project: Red-DiscordBot
License: View license
Source File: streams.py
    async def hitbox_online(self, stream):
        """Check whether *stream* is live on hitbox.tv.

        Returns True/False for live/offline, None when the API gives no
        status, and the string "error" when the check itself fails.
        """
        url = "https://api.hitbox.tv/user/" + stream
        try:
            async with aiohttp.get(url) as r:
                data = await r.json()
            # The API encodes liveness as the strings "0"/"1" (or null).
            if data["is_live"] == "0":
                return False
            elif data["is_live"] == "1":
                return True
            elif data["is_live"] is None:
                return None
        except Exception:
            # Narrowed from a bare except; network/JSON/key errors all
            # collapse to the sentinel callers treat as "couldn't check".
            return "error"

Example 6

Project: Red-DiscordBot
License: View license
Source File: streams.py
    async def beam_online(self, stream):
        """Check whether *stream* is live on beam.pro.

        Returns True/False for live/offline, None when the API reports
        an error for the channel, and "error" for any other failure.
        """
        url = "https://beam.pro/api/v1/channels/" + stream
        try:
            async with aiohttp.get(url) as r:
                data = await r.json()
            if "online" in data:
                if data["online"] is True:
                    return True
                else:
                    return False
            elif "error" in data:
                # Channel exists in the API but is in an error state.
                return None
        except Exception:
            # Narrowed from a bare except.
            return "error"
        # Unexpected payload shape (neither "online" nor "error" key).
        return "error"

Example 7

Project: pomp
License: View license
Source File: e04_aiohttp.py
    @asyncio.coroutine
    def _fetch(self, request, future):
        """Fetch request.url and resolve *future* with the response body."""
        log.debug("[AiohttpDownloader] Start fetch: %s", request.url)
        # Old-style coroutine: aiohttp.get then read the body as text.
        response = yield from aiohttp.get(request.url)
        body = yield from response.text()
        # Log length plus a short preview of the payload.
        log.debug(
            "[AiohttpDownloader] Done %s: %s %s",
            request.url, len(body), body[:20],
        )
        future.set_result(AiohttpResponse(request, body))

Example 8

Project: aiohttp
License: View license
Source File: test_client.py
async def test_non_close_detached_session_on_error_cm(loop, test_server):
    # The implicit session created by the module-level aiohttp.get()
    # helper must stay open when reading the body fails inside the
    # async-with block.
    async def handler(request):
        return web.Response()

    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    server = await test_server(app)

    cm = aiohttp.get(server.make_url('/'), loop=loop)
    # _session is private to the context-manager wrapper; the test
    # deliberately pokes at it to observe lifecycle behaviour.
    session = cm._session
    assert not session.closed
    with pytest.raises(RuntimeError):
        async with cm as resp:
            # Poison the payload stream so read() raises.
            resp.content.set_exception(RuntimeError())
            await resp.read()
    assert not session.closed

Example 9

Project: aiohttp
License: View license
Source File: test_client.py
async def test_close_detached_session_on_non_existing_addr(loop):
    # When the request itself fails (unresolvable host), the implicit
    # session created by aiohttp.get() must be closed, not leaked.
    cm = aiohttp.get('http://non-existing.example.com', loop=loop)
    session = cm._session
    assert not session.closed
    with pytest.raises(Exception):
        await cm
    assert session.closed

Example 10

Project: aiohttp
License: View license
Source File: test_resp.py
async def test_response_context_manager_error(test_server, loop):
    # A failure while reading the body inside the response context
    # manager must not leave the broken connection pooled for reuse.

    async def handler(request):
        return web.HTTPOk()

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    server = await test_server(app)
    cm = aiohttp.get(server.make_url('/'), loop=loop)
    session = cm._session
    resp = await cm
    with pytest.raises(RuntimeError):
        async with resp:
            assert resp.status == 200
            # Poison the payload stream so read() raises inside the CM.
            resp.content.set_exception(RuntimeError())
            await resp.read()
    # The connection must have been dropped, not returned to the pool.
    assert len(session._connector._conns) == 0

Example 11

Project: aiohttp
License: View license
Source File: test_resp.py
async def test_client_api_context_manager(test_server, loop):
    # Happy path: aiohttp.get() used directly as an async context
    # manager holds the connection inside the block and releases it
    # on exit.

    async def handler(request):
        return web.HTTPOk()

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    server = await test_server(app)

    async with aiohttp.get(server.make_url('/'), loop=loop) as resp:
        assert resp.status == 200
        assert resp.connection is not None
    # Exiting the block releases the underlying connection.
    assert resp.connection is None

Example 12

Project: charlesbot
License: View license
Source File: http.py
@asyncio.coroutine
def http_get_request(url, content_type="application/json"):
    """GET *url* and return the response body as text.

    On any non-200 response the failure details are logged, the
    response is closed (so the connection is not leaked), and ""
    is returned.
    """
    headers = {
        'Content-type': content_type,
    }
    response = yield from aiohttp.get(url, headers=headers)
    # Idiomatic comparison (was: `not response.status == 200`).
    if response.status != 200:
        text = yield from response.text()
        log.error("URL: %s" % url)
        log.error("Response status code was %s" % str(response.status))
        log.error(response.headers)
        log.error(text)
        response.close()
        return ""
    # Reading the body to completion releases the connection.
    return (yield from response.text())

Example 13

Project: pypi2deb
License: View license
Source File: pypi.py
@asyncio.coroutine
def get_pypi_info(name, version=None):
    """Return the parsed PyPI JSON metadata for *name*.

    When *version* is given the version-specific endpoint is queried.
    Returns None (after logging) if the request or JSON decode fails.
    """
    url = PYPI_JSON_URL + '/' + name
    if version:
        url += '/' + version
    url += '/json'
    try:
        # `conn` is a module-level connector shared across requests.
        response = yield from aiohttp.get(url, connector=conn)
    except Exception as err:
        log.error('invalid project name: {} ({})'.format(name, err))
    else:
        try:
            result = yield from response.json()
        except Exception as err:
            # log.warn is a deprecated alias of log.warning.
            log.warning('cannot download %s %s details from PyPI: %r', name, version, err)
            return
        return result

Example 14

Project: Jumper-Cogs
License: View license
Source File: pokedex.py
    async def _moveset_pokedex(self, generation: str, pokemon):
        """Get a pokemon's moveset by generation(1-6).

          Example: !pokedex moveset V pikachu """
        # The six generation branches were byte-for-byte identical except
        # for the URL suffix, so they are table-driven now.  "" means the
        # base pokedex page (current generation, 6).
        gen_suffix = {
            "6": "", "VI": "",
            "5": "/moves/5", "V": "/moves/5",
            "4": "/moves/4", "IV": "/moves/4",
            "3": "/moves/3", "III": "/moves/3",
            "2": "/moves/2", "II": "/moves/2",
            "1": "/moves/1", "I": "/moves/1",
        }
        # Catch for when there is no input at all.
        if len(pokemon) > 0:
            if generation in gen_suffix:
                url = ("http://pokemondb.net/pokedex/" + str(pokemon) +
                       gen_suffix[generation])
                try:
                    moves = await self._scrape_moveset(url)
                    t = tabulate(moves, headers=["Level", "Moves", "Type",
                                                 "Category", "Power",
                                                 "Accuracy"])
                    await self.bot.say("```" + str(t) + "```")
                except Exception:
                    # Narrowed from a bare except: fetch/scrape failures
                    # are reported as an unknown pokemon.
                    await self.bot.say("Could not locate a pokemon with that" +
                                       " name. Try a different name.")
            else:
                await self.bot.say("Generation must be " + "**" + "1-6" +
                                   "**" + " or **" + "I-VI**.")
        else:
            await self.bot.say("You need to input a pokemon name to search." +
                               "Input a name and try again."
                               )

    async def _scrape_moveset(self, url):
        """Fetch *url* and return the move rows scraped from the first
        'data-table wide-table' on the pokemondb.net page."""
        async with aiohttp.get(url) as response:
            soup = BeautifulSoup(await response.text(), "html.parser")
        moves = []
        table = soup.find('table', attrs={'class': 'data-table wide-table'})
        table_body = table.find('tbody')
        for row in table_body.find_all('tr'):
            # Strip cell text and drop empty cells (icon-only columns).
            cols = [ele.text.strip() for ele in row.find_all('td')]
            moves.append([ele for ele in cols if ele])
        return moves

Example 15

Project: Jumper-Cogs
License: View license
Source File: language.py
    async def jisho(self, ctx, word):
        """Translates Japanese to English, and English to Japanese
        Works with Romaji, Hiragana, Kanji, and Katakana"""
        channel = ctx.message.channel
        word = word.lower()
        # dict_search_args_parse returns (limit, query) or a falsy value
        # when the input could not be parsed.
        search_args = await self.dict_search_args_parse(word)
        if not search_args:
            return
        limit, query = search_args
        # URL-encode the query so non-ASCII (kanji/kana) searches work.
        message = urllib.parse.quote(query, encoding='utf-8')
        url = "http://jisho.org/api/v1/search/words?keyword=" + str(message)
        try:
            async with aiohttp.get(url) as response:
                data = await response.json()

            results = data["data"][:limit]

            output = ""

            for result in results:
                japanese = result["japanese"]
                # Prefer reading-only display when there is no kanji form.
                output += self.display_word(japanese[0], "**{reading}**",
                                            "**{word}** {reading}") + "\n"
                new_line = ""
                if result["is_common"]:
                    new_line += "Common word. "
                if result["tags"]:  # it doesn't show jlpt tags, only wk tags?
                    # Tags look like "wanikaniNN"; slice off the prefix.
                    new_line += "Wanikani level " + ", ".join(
                        [tag[8:] for tag in result["tags"]]) + ". "
                if new_line:
                    output += new_line + "\n"
                senses = result["senses"]
                for index, sense in enumerate(senses):
                    # jisho returns null sometimes for some parts of speech... k den
                    parts = [x for x in sense["parts_of_speech"] if x is not None]
                    if parts == ["Wikipedia definition"]:
                        continue
                    if parts:
                        output += "*" + ", ".join(parts) + "*\n"
                    output += str(index + 1) + ". " + "; ".join(
                        sense["english_definitions"])
                    for attr in ["tags", "info"]:
                        if sense[attr]:
                            output += ". *" + "*. *".join(sense[attr]) + "*"
                    if sense["see_also"]:
                        output += ". *See also: " + ", ".join(sense["see_also"]) + "*"
                    output += "\n"
                if len(japanese) > 1:
                    output += "Other forms: " + ", ".join(
                        [self.display_word(x, "{reading}", "{word} ({reading})") for x in
                         japanese[1:]]) + "\n"
            await self.send_long_message(channel, output)
        except Exception:
            # Narrowed from a bare except so cancellation is not
            # swallowed; any API/parse error yields one friendly message.
            await self.bot.say("I was unable to retrieve any data")

Example 16

Project: Jumper-Cogs
License: View license
Source File: pokedex.py
    async def _pokemon_pokedex(self, pokemon):
        """Get a pokemon's pokedex info.
        Example !pokedex pokemon gengar"""
        # Catch for when there is no input at all.
        if len(pokemon) > 0:
            # All data is pulled from pokemondb.net
            url = "http://pokemondb.net/pokedex/" + str(pokemon)
            try:
                async with aiohttp.get(url) as response:
                    soup = BeautifulSoup(await response.text(), "html.parser")
                    # First <img> on the page is the pokemon's picture.
                    img = soup.find("img")["src"]
                    poke = []   # right-column values (<td>)
                    pokeh = []  # left-column labels (<th>)
                    # Parent table the vitals data is scraped from.
                    table = soup.find('table', attrs={'class': 'vitals-table'})
                    table_body = table.find('tbody')
                    headers = table_body.find_all('tr')
                    # -------------------Pokedex-Info----------------------
                    dex_table = soup.find_all('table', attrs={'class': 'vitals-table'})
                    dex_rows = dex_table[4].find_all('tr')
                    # Strip the game-version labels baked into the cells.
                    dex_text1 = dex_rows[0].get_text().replace("RedBlue", "")
                    dex_text2 = dex_rows[1].get_text().replace("Yellow", "")
                    # -------------------Pokedex-Info-End------------------
                    # Labels from the <th> cells; empty strings dropped.
                    for head in headers:
                        hols = [ele.text.strip() for ele in head.find_all('th')]
                        pokeh.append([ele for ele in hols if ele])
                    # Values from the <td> cells of the same rows.
                    # (Hoisted out of the loop above — the row list was
                    # re-queried once per iteration for no effect.)
                    rows = table_body.find_all('tr')
                    for row in rows:
                        cols = [ele.text.strip() for ele in row.find_all('td')]
                        poke.append([ele for ele in cols if ele])

                    # Flatten the per-row lists and pair label with value.
                    poke2 = [x for xs in poke for x in xs]
                    pokeh2 = [x for xs in pokeh for x in xs]
                    m = list(zip(pokeh2, poke2))
                    # tabulate renders the pairs as a fixed-width table.
                    t = tabulate(m, headers=["Pokedex", "Data"])
                    # ``` puts the output into a code block in Discord.
                    await self.bot.say("\n" + "```" + str(t) + "```")
                    await self.bot.say("```" + dex_text1 + "\n" + dex_text2 + "```")
                    await self.bot.say(img)
            except Exception:
                # Narrowed from a bare except: fetch/scrape failures are
                # reported as an unknown pokemon.
                await self.bot.say("Could not locate that pokemon." +
                                   " Please try a different name"
                                   )
        else:
            await self.bot.say("Oh no! You didn't input a name. Type a" +
                               " pokemon name to search")

Example 17

Project: Jumper-Cogs
License: View license
Source File: pokedex.py
    async def _stats_pokedex(self, pokemon):
        """Get a pokemon's base stats.
        Example: !pokedex stats squirtle"""
        if len(pokemon) > 0:
            url = "http://pokemondb.net/pokedex/" + str(pokemon)
            try:
                # try now also covers the request itself, so network
                # failures produce the friendly message instead of raising.
                async with aiohttp.get(url) as response:
                    soup = BeautifulSoup(await response.text(), "html.parser")
                    stats = []
                    # Stat row labels are fixed, so no need to scrape them.
                    base = [["HP"], ["Def"], ["ATK"], ["Sp.Atk"], ["Sp.Def"],
                            ["Speed"]
                            ]
                    divs = soup.find('div', attrs={'class': 'col span-8 '})
                    table = divs.find('table', attrs={'class': 'vitals-table'})
                    table_body = table.find('tbody')

                    rows = table_body.find_all('tr')
                    for row in rows:
                        cols = [ele.text.strip() for ele in row.find_all('td')]
                        stats.append([ele for ele in cols if ele])
                    # Pair each fixed label with its scraped stat row.
                    statbase = [from_a2 + from_a1
                                for from_a2, from_a1 in zip(base, stats)]
                    # The bar-graph column scrapes as an empty list, [];
                    # filter(None, ...) drops those entries.
                    k = filter(None, statbase)
                    t = tabulate(k, headers=["Stat", "Base", "Min", "Max"])
                    await self.bot.say("```" + t + "```")
            except Exception:
                # Narrowed from a bare except.
                await self.bot.say("Could not locate that pokemon's" +
                                   " stats. Please try a different name"
                                   )
        else:
            await self.bot.say("Looks like you forgot to put in a pokemon" +
                               " name. Input a name to search"
                               )

Example 18

Project: Jumper-Cogs
License: View license
Source File: pokedex.py
    async def _item_pokedex(self, *, item):
        """Get a description of an item.
        Use '-' for spaces. Example: !pokedex item master-ball
        """
        if len(item) > 0:
            # pokemondb uses dashed, lowercase slugs in item URLs.
            item = item.replace(" ", "-").lower()
            url = "http://pokemondb.net/item/" + str(item)
            try:
                # try now also covers the request itself, so network
                # failures produce the friendly message instead of raising.
                async with aiohttp.get(url) as response:
                    soup = BeautifulSoup(await response.text(), "html.parser")
                    # The first paragraph on the page is the description;
                    # find() returns None on a 404 page → AttributeError.
                    info = soup.find('p').get_text()

                    await self.bot.say("**" + str(item.title()) + ":**" +
                                       "\n" + "```" + str(info) + "```"
                                       )
            except Exception:
                # Narrowed from a bare except.
                await self.bot.say("Cannot find an item with this name")
        else:
            await self.bot.say("Please input an item name.")

Example 19

Project: Jumper-Cogs
License: View license
Source File: pokedex.py
    async def _location_pokedex(self, pokemon):
        """Get a pokemon's catch location.
        Example !pokedex location voltorb
        """
        if len(pokemon) > 0:
            url = "http://pokemondb.net/pokedex/" + str(pokemon)
            try:
                # Unlike its sibling commands this one previously had no
                # error handling at all: any fetch/scrape failure raised
                # straight out of the command.
                async with aiohttp.get(url) as response:
                    soup = BeautifulSoup(await response.text(), "html.parser")
                    loc = []
                    version = []
                    div2 = soup.find('div', attrs={'class':
                                                   'col desk-span-7 lap-span-12'})
                    # One query instead of the original duplicate find_all.
                    tables = div2.find_all('table', attrs={'class':
                                           'vitals-table'})
                    # <td> cells hold the locations...
                    for table in tables:
                        cols = [ele.text.strip() for ele in table.find_all('td')]
                        loc.append([ele for ele in cols if ele])
                    # ...and <th> cells the game versions.
                    for table2 in tables:
                        tcols = [ele.text.strip() for ele in table2.find_all('th')]
                        version.append([ele for ele in tcols if ele])
                    # The scrape yields a list of lists; only the first
                    # entry of each is wanted before zipping the columns.
                    m = list(zip(version[0], loc[0]))
                    t = tabulate(m, headers=["Game Version", "Location"])

                    await self.bot.say("```" + str(t) + "```")
            except Exception:
                await self.bot.say("Unable to find any locations. " +
                                   "Check your spelling or try a different name."
                                   )
        else:
            # Missing space between the concatenated sentences fixed.
            await self.bot.say("Unable to find any locations. " +
                               "Check your spelling or try a different name."
                               )

Example 20

Project: Jumper-Cogs
License: View license
Source File: tibia.py
    async def _item_tibia(self, *, itemname):
        """Get a item information from tibia wiki"""
        # Wiki page titles use underscores and Title Case.
        item = itemname.replace(" ", "_").title()
        # NOTE(review): .title() never yields an empty string from a
        # non-empty input, so this guard only fires for empty itemname.
        if len(item) > 0:
            try:
                url = "http://tibia.wikia.com/wiki/" + str(item)
                async with aiohttp.get(url) as response:
                    soup = BeautifulSoup(await response.text(), "html.parser")
                    # Item image from the infobox thumbnail.
                    isearch = soup.find('div', attrs={'id': 'twbox-image'})
                    img = isearch.find("img")["src"]
                    # ----------------------------------------------------
                    # Best-effort "look" text; falls back to a placeholder
                    # when the wiki page lacks the item-look section.
                    try:
                        div1 = soup.find('div', attrs={'id': 'twbox-look'})
                        div2 = div1.find('div', attrs={'class': 'item-look'})
                        info = div2.get_text()
                    except:
                        info = "Could not find info"
                    # ----------------------------------------------------
                    # Property/value pairs from the infobox table.
                    div4 = soup.find('div', attrs={'id': 'tabular-data'})
                    table = div4.find('table', attrs={'class': 'infobox'})
                    rows = table.find_all('tr')
                    column1 = []  # property names
                    column2 = []  # property values
                    for row in rows:
                        cols = row.find_all('td', attrs={'class': 'property'})
                        cols = [ele.text.strip() for ele in cols]
                        column1.append([ele for ele in cols if ele])
                    for row in rows:
                        cols = row.find_all('td', attrs={'class': 'value'})
                        cols = [ele.text.strip() for ele in cols]
                        column2.append([ele for ele in cols if ele])
                    # Flatten the per-row lists and pair them up.
                    v = [x for xs in column1 for x in xs]
                    q = [x for xs in column2 for x in xs]
                    j = list(zip(v, q))
                    t = tabulate(j,  headers=["Property", "Value"])
                    # ----------------------------------------------------
                    # Dropped-by list, with two fallbacks: spoiler text,
                    # then a fixed "not dropped" message.
                    # NOTE(review): missing divs raise AttributeError, not
                    # ValueError — this fallback chain may never trigger
                    # on a missing section; the outer except catches it
                    # instead. TODO confirm against live wiki pages.
                    try:
                        div3 = soup.find('div', attrs={'class': 'item-droppedby-wrapper'})
                        uls = div3.find('ul', attrs={'class': 'creature-list-generic'})
                        lis = uls.find_all('li')
                        results = []
                        for li in lis:
                            hols = li.find_all('a')
                            hols = [ele.text.strip() for ele in hols]
                            results.append([ele for ele in hols if ele])
                        d = [x for xs in results for x in xs]
                        g = "Creatures that drop this item" + "\n" + "-" * 80 + "\n"
                        # column_maker is a sibling helper (not shown here)
                        # that lays names out in 3 columns.
                        k = g + self.column_maker(d, cols=3)
                    except ValueError:
                        try:
                            diva = soup.find('div', attrs={'class': 'spoiler-content'})
                            k = diva.get_text()
                        except AttributeError:
                            k = "This item is not dropped by any creatures or quest"

                    await self.bot.say(img + "\n")
                    await self.bot.say("```" + str(info) + "```")
                    await self.bot.say("```" + str(t) + "```")
                    await self.bot.say("```" + str(k) + "```")
            except:
                # Bare except: any failure above lands here.
                await self.bot.say("I could not find this item")
        else:
            await self.bot.say("Oh no! You didn't input a name. Type an" +
                               " item name to search")

Example 21

Project: Jumper-Cogs
License: View license
Source File: tibia.py
    async def _monster_tibia(self, *, monster):
        """Get a monster's information from tibia wiki.

        Scrapes the creature's wiki page and posts its image, abilities,
        loot table and a two-column stat table to the channel.
        """
        # Wiki page titles use underscores instead of spaces and Title Case.
        monster = monster.replace(" ", "_").title()
        if len(monster) > 0:
            try:
                url = "http://tibia.wikia.com/wiki/" + str(monster)
                async with aiohttp.get(url) as response:
                    soup = BeautifulSoup(await response.text(), "html.parser")
                    # -------------------image------------------------------
                    try:
                        isearch = soup.find('div', attrs={'id': 'twbox-image'})
                        img = isearch.find("img")["src"]
                    except Exception:
                        # Page has no infobox image; fall back to a notice.
                        # (Was a bare except, which also swallowed
                        # KeyboardInterrupt/SystemExit.)
                        img = "No image found"
                    # ------------------Abilities----------------------------
                    div1 = soup.find('div', attrs={'id': 'creature-abilities'})
                    p = div1.find('p')
                    title = "Abilities" + "\n"
                    header1 = "Abilities" + "\n" + "-" * 80 + "\n"
                    abilities = p.get_text()
                    # ------------------------Loot------------------------
                    div2 = soup.find('div', attrs={'class': 'loot-table'})
                    ul = div2.find('ul')
                    lis = ul.find_all('li')
                    items = []
                    for li in lis:
                        hols = li.find_all('a')
                        hols = [ele.text.strip() for ele in hols]
                        items.append([ele for ele in hols if ele])
                    # Flatten the per-<li> lists into one list of item names.
                    d = [x for xs in items for x in xs]
                    header = "Loot Table" + "\n" + "-" * 85 + "\n"
                    k = header + self.column_maker(d, cols=4)
                    # -------------------Table-Info--------------------------
                    div4 = soup.find('div', attrs={'id': 'tabular-data'})
                    table = div4.find('table')
                    rows = table.find_all('tr')
                    column1 = []
                    column2 = []
                    for row in rows:
                        cols = row.find_all('td', attrs={'class': 'property'})
                        cols = [ele.text.strip() for ele in cols]
                        column1.append([ele for ele in cols if ele])
                    for row in rows:
                        cols = row.find_all('td', attrs={'class': 'value'})
                        cols = [ele.text.strip() for ele in cols]
                        column2.append([ele for ele in cols if ele])
                    v = [x for xs in column1 for x in xs]
                    q = [x for xs in column2 for x in xs]
                    # Split the property/value columns in half so the table
                    # renders two property/value pairs per row instead of
                    # one long column.
                    splitv = self.split_list(v, wanted_parts=2)
                    halfv1 = splitv[0]
                    halfv2 = splitv[1]
                    splitq = self.split_list(q, wanted_parts=2)
                    halfq1 = splitq[0]
                    halfq2 = splitq[1]
                    j = list(zip(halfv1, halfq1, halfv2, halfq2))
                    t = tabulate(j,  headers=["Property", "Value", "Property", "Value"])

                    # ------------------------OUTPUT-------------------------
                    await self.bot.say(img + "\n")
                    await self.bot.say("```" + title + header1 + str(abilities) + "```")
                    await self.bot.say("```" + str(k) + "```")
                    await self.bot.say("```" + str(t) + "```")
            except Exception:
                # Any scraping failure (missing page, changed markup) ends
                # with a single user-facing error message.
                await self.bot.say("I could not find this creature.")
        else:
            # Fixed copy-paste error: this command searches creatures,
            # not items.
            await self.bot.say("Oh no! You didn't input a name. Type a" +
                               " creature name to search")

Example 22

Project: aiotg
License: View license
Source File: async.py
async def bitcoin(chat, match):
    """Reply in *chat* with the 24-hour average BTC/USD price."""
    ticker_url = "https://api.bitcoinaverage.com/ticker/global/USD/"
    async with aiohttp.get(ticker_url) as resp:
        ticker = await resp.json()
        await chat.send_text(ticker["24h_avg"])

Example 23

Project: Red-DiscordBot
License: View license
Source File: general.py
    async def urban(self, *, search_terms : str, definition_number : int=1):
        """Urban Dictionary search

        Definition number must be between 1 and 10"""
        # definition_number is just there to show up in the help
        # all this mess is to avoid forcing double quotes on the user
        search_terms = search_terms.split(" ")
        try:
            if len(search_terms) > 1:
                # Treat a trailing number as the requested definition index.
                pos = int(search_terms[-1]) - 1
                search_terms = search_terms[:-1]
            else:
                pos = 0
            # The API only provides the top 10 definitions (indices 0-9).
            # Was range(0, 11), which wrongly accepted an 11th definition.
            if pos not in range(0, 10):
                pos = 0
        except ValueError:
            # Last token wasn't a number; keep it as part of the search.
            pos = 0
        search_terms = "+".join(search_terms)
        url = "http://api.urbandictionary.com/v0/define?term=" + search_terms
        try:
            async with aiohttp.get(url) as r:
                result = await r.json()
            if result["list"]:
                definition = result['list'][pos]['definition']
                example = result['list'][pos]['example']
                defs = len(result['list'])
                msg = ("**Definition #{} out of {}:\n**{}\n\n"
                       "**Example:\n**{}".format(pos+1, defs, definition,
                                                 example))
                # Split long definitions across multiple messages.
                msg = pagify(msg, ["\n"])
                for page in msg:
                    await self.bot.say(page)
            else:
                await self.bot.say("Your search terms gave no results.")
        except IndexError:
            await self.bot.say("There is no definition #{}".format(pos+1))
        except Exception:
            # Network/JSON failures collapse into a generic error reply.
            await self.bot.say("Error.")

Example 24

Project: Red-DiscordBot
License: View license
Source File: image.py
    async def gif(self, *text):
        """Retrieves first search result from giphy

        gif [keyword]"""
        if len(text) > 0:
            # Reject degenerate keywords (single char or overly long).
            if len(text[0]) > 1 and len(text[0]) < 20:
                try:
                    msg = "+".join(text)
                    search = "http://api.giphy.com/v1/gifs/search?q=" + msg + "&api_key=dc6zaTOxFJmzC"
                    async with aiohttp.get(search) as r:
                        result = await r.json()
                    if result["data"] != []:
                        # First hit only; the API returns results ranked.
                        url = result["data"][0]["url"]
                        await self.bot.say(url)
                    else:
                        await self.bot.say("Your search terms gave no results.")
                except Exception:
                    # Was a bare except; keep the generic reply but stop
                    # swallowing KeyboardInterrupt/SystemExit.
                    await self.bot.say("Error.")
            else:
                await self.bot.say("Invalid search.")
        else:
            await self.bot.say("gif [text]")

Example 25

Project: Red-DiscordBot
License: View license
Source File: image.py
    async def gifr(self, *text):
        """Retrieves a random gif from a giphy search

        gifr [keyword]"""
        # NOTE: the previous random.seed() call was dead code — the
        # randomness comes from giphy's /random endpoint, and the local
        # ``random`` module is never used here.
        if len(text) > 0:
            # Reject degenerate keywords (single char or overly long).
            if len(text[0]) > 1 and len(text[0]) < 20:
                try:
                    msg = "+".join(text)
                    search = "http://api.giphy.com/v1/gifs/random?&api_key=dc6zaTOxFJmzC&tag=" + msg
                    async with aiohttp.get(search) as r:
                        result = await r.json()
                        if result["data"] != []:
                            # /random returns a single object, not a list.
                            url = result["data"]["url"]
                            await self.bot.say(url)
                        else:
                            await self.bot.say("Your search terms gave no results.")
                except Exception:
                    # Was a bare except; keep the generic reply but stop
                    # swallowing KeyboardInterrupt/SystemExit.
                    await self.bot.say("Error.")
            else:
                await self.bot.say("Invalid search.")
        else:
            await self.bot.say("gifr [text]")

Example 26

Project: ooi3
License: View license
Source File: api.py
    @asyncio.coroutine
    def world_image(self, request):
        """Serve the correct world (naval base) image for the client.

        The game client's FLASH requests a world image derived from the
        URL of the FLASH file itself, so the proxy must return the image
        matching the world-server IP stored in the user's session.

        :param request: aiohttp.web.Request
        :return: aiohttp.web.Response with the PNG body, or
            aiohttp.web.HTTPBadRequest when no world IP is in the session
            or the upstream fetch times out
        """
        size = request.match_info['size']
        session = yield from get_session(request)
        world_ip = session['world_ip']
        if world_ip:
            # Image name encodes the IP as zero-padded octets plus the
            # size suffix, e.g. "203.104.209.102" -> "203_104_209_102_<size>".
            ip_sections = map(int, world_ip.split('.'))
            image_name = '_'.join([format(x, '03') for x in ip_sections]) + '_' + size
            if image_name in self.worlds:
                # Serve from the in-memory cache when available.
                body = self.worlds[image_name]
            else:
                url = 'http://203.104.209.102/kcs/resources/image/world/' + image_name + '.png'
                coro = aiohttp.get(url, connector=self.connector)
                try:
                    # Give the upstream image server at most 5 seconds.
                    response = yield from asyncio.wait_for(coro, timeout=5)
                except asyncio.TimeoutError:
                    return aiohttp.web.HTTPBadRequest()
                body = yield from response.read()
                self.worlds[image_name] = body  # cache for future requests
            return aiohttp.web.Response(body=body, headers={'Content-Type': 'image/png', 'Cache-Control': 'no-cache'})
        else:
            return aiohttp.web.HTTPBadRequest()

Example 27

Project: aiohttp-cors
License: View license
Source File: test_main.py
    @asyncio.coroutine
    def _run_simple_requests_tests(self,
                                   tests_descriptions,
                                   use_resources):
        """Runs CORS simple requests (without a preflight request) based
        on the passed tests descriptions.

        :param tests_descriptions: iterable of test-group dicts providing
            "name", "defaults", "route_config" and a "tests" list.
        :param use_resources: when True, register the handler through a
            CORS resource; otherwise through a plain route.
        """

        @asyncio.coroutine
        def run_test(test):
            """Run single test"""

            response = yield from aiohttp.get(
                self.server_url + "resource",
                headers=test.get("request_headers", {}))
            self.assertEqual(response.status, 200)
            self.assertEqual((yield from response.text()), TEST_BODY)

            for header_name, header_value in test.get(
                    "in_response_headers", {}).items():
                with self.subTest(header_name=header_name):
                    self.assertEqual(
                        response.headers.get(header_name),
                        header_value)

            # Iterate the header *names* directly. The previous code
            # called .items(), so assertNotIn received (key, value)
            # tuples and could never match a header name — the check
            # always passed vacuously.
            for header_name in test.get("not_in_request_headers", {}):
                self.assertNotIn(header_name, response.headers)

        for test_descr in tests_descriptions:
            with self.subTest(group_name=test_descr["name"]):
                app = web.Application()
                cors = setup(app, defaults=test_descr["defaults"])

                if use_resources:
                    resource = cors.add(app.router.add_resource("/resource"))
                    cors.add(resource.add_route("GET", handler),
                             test_descr["route_config"])

                else:
                    cors.add(
                        app.router.add_route("GET", "/resource", handler),
                        test_descr["route_config"])

                # Fresh server per test group; always torn down, even if
                # an assertion inside the group fails.
                yield from self.create_server(app)

                try:
                    for test_data in test_descr["tests"]:
                        with self.subTest(name=test_data["name"]):
                            yield from run_test(test_data)
                finally:
                    yield from self.shutdown_server()

Example 28

Project: charlesbot
License: View license
Source File: http.py
@asyncio.coroutine
def http_get_auth_request(auth_string,
                          url,
                          content_type="application/json",
                          auth_method="Token",
                          payload=None):
    """Perform an authenticated HTTP GET and return the response body.

    :param auth_string: credential placed in the Authorization header
    :param url: URL to fetch
    :param content_type: value of the Content-type header
    :param auth_method: Authorization scheme (e.g. "Token", "Bearer")
    :param payload: optional dict of query-string parameters
    :return: response text on HTTP 200, empty string on any other status
    """
    # Was ``payload={}`` — a mutable default argument shared across
    # calls; default to None and build a fresh dict instead.
    if payload is None:
        payload = {}
    headers = {
        'Content-type': content_type,
        'Authorization': "%s %s" % (auth_method, auth_string),
    }
    response = yield from aiohttp.get(url, headers=headers, params=payload)
    if response.status != 200:
        # Log full context, release the connection, and fail soft with "".
        text = yield from response.text()
        log.error("URL: %s" % url)
        log.error("Response status code was %s" % str(response.status))
        log.error(response.headers)
        log.error(text)
        response.close()
        return ""
    return (yield from response.text())

Example 29

Project: pypi2deb
License: View license
Source File: pypi.py
def download(name, version=None, destdir='.'):
    """Download a project's source archive from PyPI into *destdir*.

    Coroutine (``yield from`` style). Saves the archive under the Debian
    ``<pkg>_<version>.orig.<ext>`` name and repacks it to ``.tar.xz`` via
    ``mk-origtargz`` when the upstream compression differs. Returns the
    ``.orig`` file name; an already-existing file is reused as-is.

    :param name: PyPI project name
    :param version: release version (defaults to the latest on PyPI)
    :param destdir: destination directory for the downloaded archive
    :raises Exception: when PyPI details, the release, or a source
        archive cannot be found
    """
    details = yield from get_pypi_info(name, version)
    if not details:
        raise Exception('cannot get PyPI project details for {}'.format(name))

    if not version:
        version = details['info']['version']

    release = details['releases'].get(version, {})
    if not release:
        log.debug('missing release of %s %s on PyPI', name, version)
        raise Exception('missing release')
    try:
        # Only the sdist matters here; wheels have python_version tags.
        release = next((i for i in release if i['python_version'] == 'source'))
    except StopIteration:
        release = None

    if not release:
        raise Exception('source package not available on PyPI')

    # Derive the upstream extension from the file name; anything outside
    # the known tarball set is normalized to tar.xz (repacked below).
    orig_ext = ext = release['filename'].replace('{}-{}.'.format(name, version), '')
    if ext not in {'tar.gz', 'tar.bz2', 'tar.xz'}:
        ext = 'tar.xz'

    fname = '{}_{}.orig.{}'.format(pkg_name(name), version, ext)

    fpath = join(destdir, fname)
    if exists(fpath):
        # Archive already downloaded on a previous run.
        return fname

    response = yield from aiohttp.get(release['url'], connector=conn)
    # Write under the final .orig name only when no repack is needed;
    # otherwise keep the upstream file name for mk-origtargz to consume.
    with open(fpath if ext == orig_ext else join(destdir, release['filename']), 'wb') as fp:
        data = yield from response.read()
        fp.write(data)

    if orig_ext != ext:
        cmd = ['mk-origtargz', '--rename', '--compression', 'xz',
               '--package', pkg_name(details['info']['name']), '--version', version,
               '--directory', destdir,
               '--repack', join(destdir, release['filename'])]
        # TODO: add --copyright-file if overriden copyright file is available
        yield from execute(cmd)

    return fname