Here are examples of the Python API django.http.HttpResponse, taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.
162 Examples
0
Example 1
Project: dj-revproxy Source File: proxy.py
@csrf_exempt
def proxy_request(request, **kwargs):
""" generic view to proxy a request.
Args:
destination: string, the proxied url
prefix: string, the prrefix behind we proxy the path
headers: dict, custom HTTP headers
no_redirect: boolean, False by default, do not redirect to "/"
if no path is given
decompress: boolean, False by default. If true the proxy will
decompress the source body if it's gzip encoded.
filters: list of revproxy.Filter instance
Return:
HttpResponse instance
"""
# proxy sid
try:
cookie_name = settings.REVPROXY_COOKIE
except AttributeError:
cookie_name = kwargs.get("cookie", "PROXY_SID")
sid = request.COOKIES.get(cookie_name)
# create a proxy session id only if it's needed so someone using
# a cookie based authentification can just reuse this session id.
# It can also be the session id from the session middleware.
if not sid:
sid = uuid.uuid4().hex
kwargs['proxy_sid'] = sid
# install request filters
filters_classes = kwargs.get('filters')
if not filters_classes:
filters = None
else:
filters = []
for fclass in filters_classes:
# add filter instance
fobj = fclass(request, **kwargs)
filters.append(fobj)
# eventually rewrite request and kwargs
if hasattr(fobj, 'setup'):
ret = fobj.setup()
if ret is not None:
try:
request, extra_kwargs = ret
except ValueError:
extra_kwargs = ret
if extra_kwargs is not None:
kwargs.update(extra_kwargs)
destination = kwargs.get('destination')
prefix = kwargs.get('prefix')
headers = kwargs.get('headers')
no_redirect = kwargs.get('no_redirect', False)
decompress = kwargs.get("decompress", False)
path = kwargs.get("path")
proxy_sid = kwargs.get('proxy_sid')
if path is None:
path = request.path
if prefix is not None and prefix:
path = path.split(prefix, 1)[1]
else:
if not path and not request.path.endswith("/"):
if not no_redirect:
qs = request.META["QUERY_STRING"]
redirect_url = "%s/" % request.path
if qs:
redirect_url = "%s?%s" % (redirect_url, qs)
return HttpResponsePermanentRedirect(redirect_url)
if path:
prefix = request.path.rsplit(path, 1)[0]
if not path.startswith("/"):
path = "/%s" % path
base_url = absolute_uri(request, destination)
proxied_url = ""
if not path:
proxied_url = "%s/" % base_url
else:
proxied_url = "%s%s" % (base_url, path)
qs = request.META.get("QUERY_STRING")
if qs is not None and qs:
proxied_url = "%s?%s" % (proxied_url, qs)
# fix headers@
headers = headers or {}
for key, value in request.META.iteritems():
if key.startswith('HTTP_'):
key = header_name(key)
elif key in ('CONTENT_TYPE', 'CONTENT_LENGTH'):
key = key.replace('_', '-')
if not value: continue
else:
continue
# rewrite location
if key.lower() != "host" and not is_hop_by_hop(key):
headers[key] = value
# we forward for
headers["X-Forwarded-For"] = request.get_host()
# django doesn't understand PUT sadly
method = request.method.upper()
if method == "PUT":
coerce_put_post(request)
# do the request
try:
resp = restkit.request(proxied_url, method=method,
body=request.raw_post_data, headers=headers,
follow_redirect=True,
decompress=decompress,
filters=filters)
except restkit.RequestFailed, e:
msg = getattr(e, 'msg', '')
if e.status_int >= 100:
resp = e.response
body = msg
else:
return http.HttpResponseBadRequest(msg)
body = resp.tee()
response = HttpResponse(body, status=resp.status_int)
# fix response headers
for k, v in resp.headers.items():
kl = k.lower()
if is_hop_by_hop(kl):
continue
if kl == "location":
response[k] = rewrite_location(request, prefix, v)
elif kl == "content-encoding":
if not decompress:
response[k] = v
else:
response[k] = v
# save the session
response.set_cookie(
cookie_name,
sid,
max_age=None,
expires=None,
domain=settings.SESSION_COOKIE_DOMAIN,
path=settings.SESSION_COOKIE_PATH,
secure=True)
return response
0
Example 2
Project: zulip Source File: rest.py
@csrf_exempt
def rest_dispatch(request, **kwargs):
    # type: (HttpRequest, **Any) -> HttpResponse
    """Dispatch to a REST API endpoint.

    Unauthenticated endpoints should not use this, as authentication is verified
    in the following ways:
        * for paths beginning with /api, HTTP Basic auth
        * for paths beginning with /json (used by the web client), the session token

    This calls the function named in kwargs[request.method], if that request
    method is supported, and after wrapping that function to:

        * protect against CSRF (if the user is already authenticated through
          a Django session)
        * authenticate via an API key (otherwise)
        * coerce PUT/PATCH/DELETE into having POST-like semantics for
          retrieving variables

    Any keyword args that are *not* HTTP methods are passed through to the
    target function.

    Never make a urls.py pattern put user input into a variable called GET, POST,
    etc, as that is where we route HTTP verbs to target functions.
    """
    supported_methods = {}  # type: Dict[str, Any]
    # Pull the per-HTTP-method handlers out of kwargs; what remains is
    # passed through to the target view. Iterate a copy so we can mutate.
    for arg in list(kwargs):
        if arg in METHODS:
            supported_methods[arg] = kwargs[arg]
            del kwargs[arg]

    if request.method == 'OPTIONS':
        response = HttpResponse(status=204)  # No content
        response['Allow'] = ', '.join(supported_methods.keys())
        response['Content-Length'] = "0"
        return response

    # Override requested method if magic method=??? parameter exists
    method_to_use = request.method
    if request.POST and 'method' in request.POST:
        method_to_use = request.POST['method']

    if method_to_use == "SOCKET" and "zulip.emulated_method" in request.META:
        method_to_use = request.META["zulip.emulated_method"]

    if method_to_use in supported_methods:
        entry = supported_methods[method_to_use]
        if isinstance(entry, tuple):
            target_function, view_flags = entry
            target_function = import_string(target_function)
        else:
            target_function = import_string(supported_methods[method_to_use])
            view_flags = set()

        # Set request._query for update_activity_user(), which is called
        # by some of the later wrappers.
        request._query = target_function.__name__

        # We want to support authentication by both cookies (web client)
        # and API keys (API clients). In the former case, we want to
        # do a check to ensure that CSRF etc is honored, but in the latter
        # we can skip all of that.
        #
        # Security implications of this portion of the code are minimal,
        # as we should worst-case fail closed if we miscategorise a request.

        # For some special views (e.g. serving a file that has been
        # uploaded), we support using the same url for web and API clients.
        if ('override_api_url_scheme' in view_flags
                and request.META.get('HTTP_AUTHORIZATION', None) is not None):
            # This request uses API-based authentication.
            target_function = authenticated_rest_api_view()(target_function)
        # /json views (web client) validate with a session token (cookie)
        elif not request.path.startswith("/api") and request.user.is_authenticated():
            # Authenticated via sessions framework, only CSRF check needed
            target_function = csrf_protect(authenticated_json_view(target_function))
        # most clients (mobile, bots, etc) use HTTP Basic Auth and REST calls,
        # where instead of username:password, we use email:apiKey
        elif request.META.get('HTTP_AUTHORIZATION', None):
            # Wrap function with decorator to authenticate the user before
            # proceeding
            target_function = authenticated_rest_api_view()(target_function)
        # Pick a way to tell user they're not authed based on how the request was made
        else:
            # If this looks like a request from a top-level page in a
            # browser, send the user to the login page
            if 'text/html' in request.META.get('HTTP_ACCEPT', ''):
                return HttpResponseRedirect('%s/?next=%s' % (settings.HOME_NOT_LOGGED_IN, request.path))
            # Ask for basic auth (email:apiKey)
            elif request.path.startswith("/api"):
                return json_unauthorized(_("Not logged in: API authentication or user session required"))
            # Session cookie expired, notify the client
            else:
                return json_unauthorized(_("Not logged in: API authentication or user session required"),
                                         www_authenticate='session')

        if request.method not in ["GET", "POST"]:
            # process_as_post needs to be the outer decorator, because
            # otherwise we might access and thus cache a value for
            # request.REQUEST.
            target_function = process_as_post(target_function)
        return target_function(request, **kwargs)
    return json_method_not_allowed(list(supported_methods.keys()))
0
Example 3
Project: jaikuenginepatch Source File: views.py
@alternate_nick
def actor_history(request, nick=None, format='html'):
    """Render a user's activity history in the requested format.

    Handles subscription requests and inline view actions first, then
    builds the inbox/sidebar context and renders html/json/atom/rss.
    NOTE(review): an unrecognized format falls off the end and implicitly
    returns None — original behavior, confirm callers never pass one.
    """
    nick = clean.nick(nick)
    view = api.actor_lookup_nick(request.user, nick)
    if not view:
        raise exception.UserDoesNotExistError(nick, request.user)

    called_subscribe, sub_ref = common_views.call_api_from_request(
        request, 'subscription_request')
    if called_subscribe:
        if sub_ref.state == 'subscribed':
            message = 'Subscribed.'
        else:
            message = 'Subscription requested.'
        return util.RedirectFlash(view.url(), message)

    handled = common_views.handle_view_action(
        request,
        {'entry_remove': request.path,
         'entry_remove_comment': request.path,
         'entry_mark_as_spam': request.path,
         'subscription_remove': view.url(),
         'actor_add_contact': request.path,
         'actor_remove_contact': request.path,
         'post': request.path,
         'presence_set': request.path,
         }
    )
    if handled:
        return handled

    # Work out the privacy level of the requesting user relative to the
    # viewed user: private (self), contacts, or public.
    privacy = 'public'
    if request.user and request.user.is_authenticated():
        if view.nick == request.user.nick:
            privacy = 'private'
        # ROOT because we care whether or not request.user is a contact of
        # the view user's, not whether the request.user can see the contacts
        elif api.actor_has_contact(api.ROOT, view.nick, request.user.nick):
            privacy = 'contacts'

    # we're going to hide a bunch of stuff if this user is private and we
    # aren't allowed to see
    user_is_private = False
    if view.privacy < models.PRIVACY_PUBLIC and privacy == 'public':
        user_is_private = True

    per_page = ENTRIES_PER_PAGE
    offset, prev = util.page_offset(request)

    # Fetch one more entry than a page so we know whether "more" exists.
    if privacy == 'public':
        if user_is_private:
            inbox = []
        else:
            inbox = api.inbox_get_actor_public(request.user, view.nick,
                                               limit=(per_page + 1), offset=offset)
    elif privacy == 'contacts':
        inbox = api.inbox_get_actor_contacts(request.user, view.nick,
                                             limit=(per_page + 1), offset=offset)
    elif privacy == 'private':
        inbox = api.inbox_get_actor_private(request.user, view.nick,
                                            limit=(per_page + 1), offset=offset)

    actor_streams = api.stream_get_actor_safe(request.user, view.nick)

    entries, more = _get_inbox_entries(request, inbox)
    contacts, channels, streams, entries = _assemble_inbox_data(request,
                                                                entries,
                                                                actor_streams,
                                                                view)

    # If not logged in, cannot write
    is_owner = request.user and request.user.is_authenticated() and view.nick == request.user.nick

    # Presence is best-effort: missing presence data is not an error.
    try:
        presence = api.presence_get(request.user, view.nick)
        presence_stream = api.stream_get_presence(request.user, view.nick)
        last_entry = api.entry_get_last(request.user, presence_stream.keyname())
        view.last_entry = last_entry
    except exception.ApiException:
        pass

    # for add/remove contact
    if request.user and request.user.is_authenticated():
        user_is_contact = api.actor_has_contact(request.user,
                                                request.user.nick,
                                                view.nick)
        view.my_contact = user_is_contact
    else:
        user_is_contact = False

    # for sidebar streams
    view_streams = _get_sidebar_streams(actor_streams, streams, request.user)

    # for sidebar_contacts
    contacts_count = view.extra.get('contact_count', 0)
    contacts_more = contacts_count > CONTACTS_PER_PAGE

    # for sidebar channels
    channels_count = view.extra.get('channel_count', 0)
    channels_more = channels_count > CHANNELS_PER_PAGE

    # Config for the template; the RequestContext picks everything up
    # from locals(), so the variables above are all template inputs.
    green_top = True
    sidebar_green_top = True
    selectable_icons = display.SELECTABLE_ICONS
    area = 'user'

    c = template.RequestContext(request, locals())

    if format == 'html':
        t = loader.get_template('history.html')
        return http.HttpResponse(t.render(c))
    elif format == 'json':
        t = loader.get_template('history.json')
        r = util.HttpJsonResponse(t.render(c), request)
        return r
    elif format == 'atom':
        t = loader.get_template('history.atom')
        r = util.HttpAtomResponse(t.render(c), request)
        return r
    elif format == 'rss':
        t = loader.get_template('history.rss')
        r = util.HttpRssResponse(t.render(c), request)
        return r
0
Example 4
Project: tomada-social Source File: views.py
@myuser_login_required
def rest(request, event_id=None):
    """REST-style endpoint for Event objects.

    POST creates an event; with an event_id, PUT updates, GET fetches and
    DELETE removes it. Always responds with a JSON body.
    NOTE(review): with an event_id and any other verb (e.g. HEAD), `data`
    is never bound and the final HttpResponse raises NameError — original
    behavior, confirm routing prevents it.
    """
    if request.method == 'POST':
        title = request.POST['title']
        description = request.POST['description']
        date_start = request.POST['date_start']
        date_end = request.POST.get('date_end', None)
        lat = request.POST['lat']
        lng = request.POST['lng']
        #image = request.FILES['image']
        event = Event(title=title)
        event.last_update = datetime.datetime.now()
        event.description = description
        event.date_start = datetime.datetime.strptime(date_start, '%d/%m/%Y %H:%M')
        if date_end:
            event.date_end = datetime.datetime.strptime(date_end, '%d/%m/%Y %H:%M')
        event.location = [float(lat), float(lng)]
        user_id = request.session.get('userid')
        user = Account.objects(id=user_id)[0]
        event.user = user
        event.user_going = [user]
        #im = Image.open(image)
        #event.image.put(open(im))
        event.save()
        # creator automatically attends their own event
        Account.objects(id=user_id).update_one(push__event_going=event)
        data = json.dumps({'status': 'ok'})
    elif event_id:
        # Remaining verbs operate on an existing event owned by the caller.
        user = Account.objects(id=request.session.get('userid'))[0]
        event = Event.objects(id=event_id, user=user)[0]
        if request.method == 'PUT':
            # update field values and save to mongo
            data = JSONParser().parse(request)
            title = data['title']
            description = data['description']
            date_start = data['date_start']
            date_end = data['date_end']
            lat = data['lat']
            lng = data['lng']
            #image = request.FILES['image']
            event.title = title
            event.last_update = datetime.datetime.now()
            event.description = description
            event.date_start = datetime.datetime.strptime(date_start, '%d/%m/%Y %H:%M')
            if not date_end:
                event.date_end = None
            else:
                event.date_end = datetime.datetime.strptime(date_end, '%d/%m/%Y %H:%M')
            event.location = [float(lat), float(lng)]
            event.user = user
            #event.image = request.FILES['image']
            event.save()
            data = json.dumps({'status': 'ok'})
        elif request.method == 'GET':
            data = {
                'id': str(event.id),
                'title': event.title,
                'description': event.description,
                'location': {
                    'lat': event.location[0],
                    'lng': event.location[1]
                },
                'date_start': str(event.date_start.strftime('%d/%m/%Y %H:%M')) if event.date_start else '',
                'date_end': str(event.date_end.strftime('%d/%m/%Y %H:%M')) if event.date_end else ''
            }
            data = json.dumps(data)
        elif request.method == 'DELETE':
            # remove the event from every attendee list before deleting it
            Account.objects().update(pull__event_going=event)
            Account.objects().update(pull__event_maybe=event)
            event.delete()
            data = json.dumps({'status': 'ok'})
    else:
        data = json.dumps({'status': 'error'})
    return HttpResponse(data, content_type='application/json')
0
Example 5
Project: tendenci Source File: views.py
@permission_required('theme_editor.change_themefileversion')
def edit_file(request, form_class=FileForm, template_name="theme_editor/index.html"):
    """Theme-editor file browser/editor view.

    Resolves the requested theme file (sanitizing the query-string path),
    handles AJAX POST saves with a JSON status reply, and otherwise renders
    the editor page with directory listings, revisions and form context.
    """
    if not has_perm(request.user, 'theme_editor.view_themefileversion'):
        raise Http403

    selected_theme = request.GET.get("theme_edit", get_theme())
    original_theme_root = os.path.join(settings.ORIGINAL_THEMES_DIR, selected_theme)
    if settings.USE_S3_THEME:
        theme_root = os.path.join(settings.THEME_S3_PATH, selected_theme)
    else:
        theme_root = os.path.join(settings.ORIGINAL_THEMES_DIR, selected_theme)

    # get the default file and clean up any input (normalize slashes and
    # collapse repeated separators so path checks below behave)
    default_file = request.GET.get("file", DEFAULT_FILE)
    if default_file:
        default_file = default_file.replace('\\', '/')
        default_file = default_file.strip('/')
        default_file = default_file.replace('////', '/')
        default_file = default_file.replace('///', '/')
        default_file = default_file.replace('//', '/')

    is_file = qstr_is_file(default_file, ROOT_DIR=theme_root)
    is_dir = qstr_is_dir(default_file, ROOT_DIR=theme_root)

    if is_file:
        pass
    elif is_dir:
        # if default_file is a directory then append the
        # trailing slash so we can get the dirname below
        default_file = '%s/' % default_file
    else:
        # if the default_file is not a directory or file within
        # the themes folder then return a 404
        raise Http404(_("Custom template not found. Make sure you've copied over the themes to the THEME_DIR."))

    # get the current file name
    current_file = os.path.basename(default_file)

    # get file ext
    name = current_file.split('/')[-1]
    ext = name.split('.')[-1]
    stylesheets = ['css', 'less']

    # get the present working directory
    # and make sure they cannot list root
    pwd = os.path.dirname(default_file)
    if pwd == '/':
        pwd = ''
    current_file_path = os.path.join(pwd, current_file)

    # get the previous directory name and path
    prev_dir = '/'
    prev_dir_name = 'theme base'
    pwd_split = pwd.split('/')
    if len(pwd_split) > 1:
        prev_dir_name = pwd_split[-2]
        pwd_split.pop()
        prev_dir = '/'.join(pwd_split)
    elif not pwd_split[0]:
        prev_dir = ''

    # get the directory list
    dirs = get_dir_list(pwd, ROOT_DIR=theme_root)

    # get the file list
    files, non_editable_files = get_file_list(pwd, ROOT_DIR=theme_root)
    all_files_folders = get_all_files_list(ROOT_DIR=theme_root)

    # non-deletable files
    non_deletable_files = ['homepage.html', 'default.html', 'footer.html', 'header.html', 'sidebar.html', 'nav.html', 'styles.less', 'styles.css']

    # get the number of themes in the themes directory on the site
    theme_choices = [i for i in theme_choice_list()]
    theme_count = len(theme_choices)

    # get a list of revisions
    archives = ThemeFileVersion.objects.filter(relative_file_path=default_file).order_by("-create_dt")

    if request.is_ajax() and request.method == "POST":
        # AJAX save: respond with a JSON status payload
        file_form = form_class(request.POST)
        response_status = 'FAIL'
        response_message = _('Cannot update file.')
        if file_form.is_valid():
            if file_form.save(request, default_file, ROOT_DIR=theme_root, ORIG_ROOT_DIR=original_theme_root):
                response_status = 'SUCCESS'
                response_message = unicode(_('Your changes have been saved.'))
                EventLog.objects.log()
        response = json.dumps({'status': response_status, 'message': response_message})
        return HttpResponse(response, content_type="application/json")

    content = get_file_content(default_file, ROOT_DIR=theme_root)
    file_form = form_class({"content": content, "rf_path": default_file})
    theme_form = ThemeSelectForm(initial={'theme_edit': selected_theme})
    return render_to_response(template_name, {
        'file_form': file_form,
        'theme_form': theme_form,
        'current_theme': selected_theme,
        'current_file_path': current_file_path,
        'current_file': current_file,
        'prev_dir_name': prev_dir_name,
        'prev_dir': prev_dir,
        'pwd': pwd,
        'dirs': dirs,
        'files': files,
        'non_editable_files': non_editable_files,
        'non_deletable_files': non_deletable_files,
        'theme_count': theme_count,
        'archives': archives,
        'is_file': is_file,
        'is_dir': is_dir,
        'all_files_folders': all_files_folders,
        'ext': ext,
        'stylesheets': stylesheets
    }, context_instance=RequestContext(request))
0
Example 6
Project: AutoO_with_django Source File: views.py
def admin(request, module="", action=""):
    """Admin dashboard dispatcher.

    Routes on (module, action): 'project' add/del, 'servers' with SNMP
    host discovery, asset add/del/update and a paginated, filterable
    server list. Unauthenticated/non-admin sessions are redirected to '/'.

    NOTE(review): indentation was lost in extraction; nesting below is
    reconstructed from statement order — verify against the upstream repo.
    """
    def logRecord(r_action='', r_table='', r_data=''):
        # Persist an audit record: who did what, on which table, with what data.
        record_name = request.session['user_name']
        record_time = time.strftime('%Y-%m-%d %H:%M', time.localtime())
        data_str = ''
        for temp in sorted(r_data):
            data_str += str(temp) + '=' + r_data[temp] + ' '
        log_op = Logrecord(user=record_name, time=record_time, action=r_action, table=r_table, data=data_str)
        log_op.save()

    if 'loginToken' in request.session and request.session['user_admin']:
        if module == 'project':
            if action != '':
                if action == "add" and request.session['user_sys']:
                    proj_alias = request.POST['alias_name']
                    proj_name = request.POST['name']
                    proj_remark = request.POST['remark']
                    obj = Project(alias=proj_alias, name=proj_name, remark=proj_remark)
                    obj.save()
                    logRecord(action, 'project', request.POST)
                    result = {}
                    result['code'] = 1
                    result['message'] = "添加成功"
                elif action == "del":
                    pid = request.POST['id']
                    # sys admins may delete any project; others only their own
                    if request.session['user_sys'] or pid == request.session['user_proj']:
                        Project.objects.get(id=pid).delete()
                        logRecord(action, 'project', request.POST)
                        result = {}
                        result['code'] = 1
                        result['message'] = "删除成功"
                    else:
                        result = {}
                        result['code'] = 1
                        result['message'] = "无权限删除"
                elif action.isdigit():
                    return HttpResponse(action)
                else:
                    result = {}
                    result['code'] = 0
                    result['message'] = "操作失败"
                return HttpResponse(json.dumps(result), content_type="application/json")
            else:
                # no action: render the project admin page
                projects = Project.objects.all().order_by('alias')
                rsp = render(request, 'admin_project.html', locals())
                return HttpResponse(rsp)
        elif module == 'servers':
            if action != '':
                if action == "getinfo":
                    # Query a host over SNMP for hostname/CPU/memory/IPs.
                    if 'ipaddr' in request.POST and request.POST['ipaddr'] != "":
                        ipaddr = request.POST['ipaddr']
                        data_trans = request.POST['data_trans']
                        snmpsession = netsnmp.Session(Version=2, DestHost=ipaddr, Timeout=50000, ErrorStr='Cannot connect')
                        oid_name = netsnmp.Varbind('.1.3.6.1.2.1.1.5.0')  # hostname OID
                        bind_name = netsnmp.VarList(oid_name)
                        oid_cpu = netsnmp.Varbind('.1.3.6.1.2.1.25.3.3.1.2')  # CPU load OID
                        bind_cpu = netsnmp.VarList(oid_cpu)
                        oid_mem = netsnmp.Varbind('.1.3.6.1.2.1.25.2.2.0')  # total memory OID
                        bind_mem = netsnmp.VarList(oid_mem)
                        oid_ip = netsnmp.Varbind('.1.3.6.1.2.1.4.20.1.1')  # IP address OID
                        bind_ip = netsnmp.VarList(oid_ip)
                        snmp_name = snmpsession.get(bind_name)
                        snmp_cpu = snmpsession.walk(bind_cpu)
                        snmp_mem = snmpsession.get(bind_mem)
                        snmp_ip = snmpsession.walk(bind_ip)
                        result_name = snmp_name[0]
                        # CPU count = number of non-empty load entries
                        i = 0
                        for data in snmp_cpu:
                            if data != '':
                                i += 1
                        result_cpu = i
                        result_mem = int(snmp_mem[0]) / 1024
                        if data_trans == "1":
                            # round memory up to marketing sizes (512MB min,
                            # otherwise next whole GB expressed in MB)
                            if result_mem <= 512:
                                result_mem = 512
                            else:
                                result_mem = (result_mem / 1024 + 1) * 1024
                        result_ip = []
                        i = 0
                        for data in snmp_ip:
                            if data != '127.0.0.1':
                                # resolve the interface name for this address
                                # (ip_name is fetched but unused upstream)
                                oid = '.1.3.6.1.2.1.4.20.1.2.' + str(data)
                                oid_ip_index = netsnmp.Varbind(oid)
                                bind_ip_index = netsnmp.VarList(oid_ip_index)
                                snmp_ip_index = snmpsession.get(bind_ip_index)
                                ip_index = snmp_ip_index[0]
                                oid_ip_name = netsnmp.Varbind('.1.3.6.1.2.1.2.2.1.2.' + str(ip_index))
                                bind_ip_name = netsnmp.VarList(oid_ip_name)
                                snmp_ip_name = snmpsession.get(bind_ip_name)
                                ip_name = snmp_ip_name[0]
                                result_ip.append(data)
                        result = {}
                        result['code'] = 0
                        result['host'] = result_name
                        result['cpu'] = result_cpu
                        result['mem'] = result_mem
                        result['ip'] = "|".join(result_ip)
                        result['ip_disp'] = result['ip'].replace("|", "\r\n")
                        return HttpResponse(json.dumps(result), content_type="application/json")
                    else:
                        projects = Project.objects.all().order_by('alias')
                        rsp = render(request, 'user_index.html', locals())
                        return HttpResponse(rsp)
                elif action == "add":
                    asset_pid = request.POST['pid']
                    asset_ip = request.POST['ip']
                    asset_hostname = request.POST['hostname']
                    asset_cpu = request.POST['cpu']
                    asset_mem = request.POST['mem']
                    asset_disk = request.POST['disk']
                    if request.POST['type'] == '1':
                        asset_type = "物理机"
                    elif request.POST['type'] == '2':
                        asset_type = "虚拟机"
                    else:
                        asset_type = "其他"
                    asset_srv = request.POST['srv']
                    asset_desc = request.POST['desc']
                    pid = Project.objects.get(id=asset_pid)
                    if request.session['user_sys'] or asset_pid == request.session['user_proj']:
                        obj = Server(pid=pid,
                                     ip=asset_ip,
                                     hostname=asset_hostname,
                                     cpu=asset_cpu,
                                     mem=asset_mem,
                                     disk=asset_disk,
                                     type=asset_type,
                                     srv=asset_srv,
                                     desc=asset_desc,
                                     status='1')
                        obj.save()
                        logRecord(action, 'asset', request.POST)
                        result = {}
                        result['code'] = 1
                        result['message'] = "添加成功"
                    else:
                        result = {}
                        result['code'] = 0
                        result['message'] = "未授权的操作"
                elif action == "del":
                    id = request.POST['id']
                    del_data = Server.objects.filter(id=id)
                    del_id = str(del_data[0].pid.id)
                    if request.session['user_sys'] or del_id == request.session['user_proj']:
                        try:
                            Server.objects.get(id=id).delete()
                            logRecord(action, 'asset', request.POST)
                            result = {}
                            result['code'] = 1
                            result['message'] = "删除成功"
                        except:
                            result = {}
                            result['code'] = 0
                            result['message'] = "删除异常"
                    else:
                        result = {}
                        result['code'] = 0
                        result['message'] = "未授权的操作"
                elif action.isdigit():
                    # numeric action = a server id: update it or show details
                    if 'update' in request.GET:
                        asset_pid = request.POST['pid']
                        asset_ip = request.POST['ip']
                        asset_hostname = request.POST['hostname']
                        asset_cpu = request.POST['cpu']
                        asset_mem = request.POST['mem']
                        asset_disk = request.POST['disk']
                        if request.POST['type'] == '1':
                            asset_type = "物理机"
                        elif request.POST['type'] == '2':
                            asset_type = "虚拟机"
                        else:
                            asset_type = "其他"
                        asset_srv = request.POST['srv']
                        asset_desc = request.POST['desc']
                        asset_status = request.POST['status']
                        asset_cacti = request.POST['cacti']
                        asset_nagios = request.POST['nagios']
                        if request.session['user_sys'] or asset_pid == request.session['user_proj']:
                            try:
                                Server.objects.filter(id=action).update(ip=asset_ip,
                                                                        hostname=asset_hostname,
                                                                        cpu=asset_cpu,
                                                                        mem=asset_mem,
                                                                        disk=asset_disk,
                                                                        type=asset_type,
                                                                        srv=asset_srv,
                                                                        desc=asset_desc,
                                                                        status=asset_status,
                                                                        cacti=asset_cacti,
                                                                        nagios=asset_nagios
                                                                        )
                                logRecord('update', 'asset', request.POST)
                                result = {}
                                result['code'] = 1
                                result['message'] = "资产修改成功"
                            except:
                                result = {}
                                result['code'] = 0
                                result['message'] = "资产修改未提交"
                        else:
                            result = {}
                            result['code'] = 0
                            result['message'] = "未授权的操作"
                        return HttpResponse(json.dumps(result), content_type="application/json")
                    else:
                        try:
                            queryset = Server.objects.select_related().get(id=action)
                        except:
                            return HttpResponse('无效ID')
                        rsp = render(request, 'admin_display_server.html', locals())
                        return HttpResponse(rsp)
                else:
                    result = {}
                    result['code'] = 0
                    result['message'] = "操作失败"
                # shared JSON reply for the add/del/fall-through branches above
                return HttpResponse(json.dumps(result), content_type="application/json")
            else:
                # no action: paginated, optionally filtered server listing
                if 'page' in request.GET and request.GET['page'].isdigit():
                    page_get = int(float(request.GET['page']))
                else:
                    page_get = 1
                if 'query' in request.GET:
                    if request.session['query'].has_key('op'):
                        # refine an existing query: empty POST fields clear
                        # their filter, non-empty ones overwrite it
                        request.session['query'] = {'op': 'True'}
                        if 'pid' in request.POST:
                            if request.POST['pid'] == "":
                                try:
                                    request.session['query_data'].pop('pid')
                                except:
                                    a = 1
                            else:
                                request.session['query_data']['pid'] = request.POST['pid']
                        if 'ip' in request.POST:
                            if request.POST['ip'] == "":
                                try:
                                    request.session['query_data'].pop('ip__contains')
                                except:
                                    a = 1
                            else:
                                request.session['query_data']['ip__contains'] = request.POST['ip']
                        if 'srv' in request.POST:
                            if request.POST['srv'] == "":
                                try:
                                    request.session['query_data'].pop('srv')
                                except:
                                    a = 1
                            else:
                                request.session['query_data']['srv'] = request.POST['srv']
                        if 'status' in request.POST:
                            if request.POST['status'] == "":
                                try:
                                    request.session['query_data'].pop('status')
                                except:
                                    a = 1
                            else:
                                request.session['query_data']['status'] = request.POST['status']
                        exper = request.session['query_data']
                    else:
                        # first query: seed the session filter dict
                        request.session['query'] = {'op': 'true'}
                        if 'pid' in request.POST and request.POST['pid'] != "":
                            request.session['query_data']['pid'] = request.POST['pid']
                        if 'ip' in request.POST and request.POST['ip'] != "":
                            request.session['query_data']['ip__contains'] = request.POST['ip']
                        if 'srv' in request.POST and request.POST['srv'] != "":
                            request.session['query_data']['srv'] = request.POST['srv']
                        if 'status' in request.POST and request.POST['status'] != "":
                            request.session['query_data']['status'] = request.POST['status']
                        exper = request.session['query_data']
                    servers = Server.objects.filter(**exper)
                else:
                    # no query: reset filters; non-sys users see only their project
                    request.session['query'] = {}
                    request.session['query_data'] = {}
                    if request.session['user_sys']:
                        servers = Server.objects.select_related().all()
                    else:
                        servers = Server.objects.select_related().filter(pid=request.session['user_proj'])
                pagin = Paginator(servers, 20)
                page_max = pagin.num_pages
                if page_get > page_max:
                    page = page_max
                else:
                    page = page_get
                data_list = pagin.page(page)
                # build first/prev/next/last pager links, preserving ?query
                if 'query' in request.GET:
                    url_fp = "?query&page=1"
                    if page <= 1:
                        url_pp = "?query&page=1"
                    else:
                        url_pp = "?query&page=" + str((page - 1))
                    if page >= page_max:
                        url_np = "?query&page=" + str(page_max)
                    else:
                        url_np = "?query&page=" + str((page + 1))
                    url_lp = "?query&page=" + str(page_max)
                else:
                    url_fp = "?page=1"
                    if page <= 1:
                        url_pp = "?page=1"
                    else:
                        url_pp = "?page=" + str((page - 1))
                    if page >= page_max:
                        url_np = "?page=" + str(page_max)
                    else:
                        url_np = "?page=" + str((page + 1))
                    url_lp = "?page=" + str(page_max)
                if request.session['user_sys']:
                    projects = Project.objects.all().order_by('alias')
                else:
                    projects = Project.objects.filter(id=request.session['user_proj']).order_by('alias')
                rsp = render(request, 'admin_servers.html', locals())
                return HttpResponse(rsp)
        else:
            # unknown module: render the admin landing page
            rsp = render(request, 'admin_base.html', locals())
            return HttpResponse(rsp)
    else:
        return HttpResponseRedirect('/')
0
Example 7
Project: timebank Source File: compressor.py
def gzip_compressor(request):
    """Serve a concatenated (optionally gzipped) TinyMCE script bundle.

    With js != "true" it emits the bootstrap loader script; otherwise it
    concatenates core, language, theme and plugin files, honors
    If-None-Match / If-Modified-Since via a cache entry keyed on the
    requested component list, and caches the resulting validators.
    """
    plugins = split_commas(request.GET.get("plugins", ""))
    languages = split_commas(request.GET.get("languages", ""))
    themes = split_commas(request.GET.get("themes", ""))
    isJS = request.GET.get("js", "") == "true"
    compress = request.GET.get("compress", "true") == "true"
    suffix = request.GET.get("suffix", "") == "_src" and "_src" or ""
    content = []

    response = HttpResponse()
    response["Content-Type"] = "text/javascript"

    if not isJS:
        # First stage: return the loader that will re-request with js=true.
        response.write(render_to_string('tinymce/tiny_mce_gzip.js', {
            'base_url': tinymce.settings.JS_BASE_URL,
        }, context_instance=RequestContext(request)))
        return response

    patch_vary_headers(response, ['Accept-Encoding'])

    now = datetime.utcnow()
    response['Date'] = now.strftime('%a, %d %b %Y %H:%M:%S GMT')

    cacheKey = '|'.join(plugins + languages + themes)
    cacheData = cache.get(cacheKey)

    # Conditional-request handling from previously cached validators.
    if not cacheData is None:
        if cacheData.has_key('ETag'):
            if_none_match = request.META.get('HTTP_IF_NONE_MATCH', None)
            if if_none_match == cacheData['ETag']:
                response.status_code = 304
                response.content = ''
                response['Content-Length'] = '0'
                return response
        if cacheData.has_key('Last-Modified'):
            if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE', None)
            if if_modified_since == cacheData['Last-Modified']:
                response.status_code = 304
                response.content = ''
                response['Content-Length'] = '0'
                return response

    # Add core, with baseURL added
    content.append(get_file_contents("tiny_mce%s.js" % suffix).replace(
        "tinymce._init();", "tinymce.baseURL='%s';tinymce._init();"
        % tinymce.settings.JS_BASE_URL))

    # Patch loading functions
    content.append("tinyMCE_GZ.start();")

    # Add core languages
    for lang in languages:
        content.append(get_file_contents("langs/%s.js" % lang))

    # Add themes (and their per-language files)
    for theme in themes:
        content.append(get_file_contents("themes/%s/editor_template%s.js"
                                         % (theme, suffix)))
        for lang in languages:
            content.append(get_file_contents("themes/%s/langs/%s.js"
                                             % (theme, lang)))

    # Add plugins (and their per-language files)
    for plugin in plugins:
        content.append(get_file_contents("plugins/%s/editor_plugin%s.js"
                                         % (plugin, suffix)))
        for lang in languages:
            content.append(get_file_contents("plugins/%s/langs/%s.js"
                                             % (plugin, lang)))

    # Add filebrowser
    if tinymce.settings.USE_FILEBROWSER:
        content.append(render_to_string('tinymce/filebrowser.js', {},
                                        context_instance=RequestContext(request)).encode("utf-8"))

    # Restore loading functions
    content.append("tinyMCE_GZ.end();")

    # Compress
    if compress:
        content = compress_string(''.join(content))
        response['Content-Encoding'] = 'gzip'
        response['Content-Length'] = str(len(content))
    # NOTE(review): when compress is false, `content` is still a list here —
    # response.write(list) looks wrong; likely should be ''.join(content).
    # TODO confirm against upstream before changing behavior.
    response.write(content)

    timeout = 3600 * 24 * 10
    patch_response_headers(response, timeout)
    # Remember the validators Django just set so the 304 path above works.
    cache.set(cacheKey, {
        'Last-Modified': response['Last-Modified'],
        'ETag': response['ETag'],
    })
    return response
0
Example 8
Project: django-adminactions Source File: api.py
def export_as_xls2(queryset, fields=None, header=None,  # noqa
                   filename=None, options=None, out=None):
    # sheet_name=None, header_alt=None,
    # formatting=None, out=None):
    """
    Exports a queryset as xls from a queryset with the given fields.
    :param queryset: queryset to export (can also be list of namedtuples)
    :param fields: list of fields names to export. None for all fields
    :param header: if True, the exported file will have the first row as column names
    :param out: object that implements File protocol.
    :param header_alt: if is not None, and header is True, the first row will be as header_alt (same nr columns)
    :param formatting: if is None will use formatting_default
    :return: HttpResponse instance if out not supplied, otherwise out
    """
    # NOTE(review): indentation reconstructed from a whitespace-stripped copy
    # of this file — verify nesting against the upstream django-adminactions
    # source before relying on it.
    def _get_qs_formats(queryset):
        # Map column index -> xlwt number-format (string or callable) chosen
        # from xls_options_default by field name, falling back to the field
        # class name; empty when queryset is not a model queryset.
        formats = {}
        if hasattr(queryset, 'model'):
            for i, fieldname in enumerate(fields):
                try:
                    f, __, __, __, = compat.get_field_by_name(queryset.model, fieldname)
                    fmt = xls_options_default.get(f.name, xls_options_default.get(f.__class__.__name__, 'general'))
                    formats[i] = fmt
                except FieldDoesNotExist:
                    # Non-model attribute: leave it to the 'general' default.
                    pass
                    # styles[i] = xlwt.easyxf(num_format_str=xls_options_default.get(col_class, 'general'))
                    # styles[i] = xls_options_default.get(col_class, 'general')
        return formats

    if out is None:
        # No file-like supplied: answer with an Excel attachment response.
        if filename is None:
            filename = filename or "%s.xls" % queryset.model._meta.verbose_name_plural.lower().replace(" ", "_")
        response = HttpResponse(content_type='application/vnd.ms-excel')
        response['Content-Disposition'] = ('attachment;filename="%s"' % filename).encode('us-ascii', 'replace')
    else:
        # Caller supplied an output object; `filename` is ignored in this case.
        response = out
    config = xls_options_default.copy()
    if options:
        config.update(options)
    if fields is None:
        fields = [f.name for f in queryset.model._meta.fields]
    book = xlwt.Workbook(encoding="utf-8", style_compression=2)
    sheet_name = config.pop('sheet_name')
    use_display = config.get('use_display', False)
    sheet = book.add_sheet(sheet_name)
    style = xlwt.XFStyle()
    row = 0
    heading_xf = xlwt.easyxf('font:height 200; font: bold on; align: wrap on, vert centre, horiz center')
    sheet.write(row, 0, u'#', style)  # column 0 is a running row counter
    if header:
        if not isinstance(header, (list, tuple)):
            # header=True: derive column titles from the model verbose names.
            header = [force_text(f.verbose_name) for f in queryset.model._meta.fields if f.name in fields]
        for col, fieldname in enumerate(header, start=1):
            sheet.write(row, col, fieldname, heading_xf)
            sheet.col(col).width = 5000
        sheet.row(row).height = 500
    formats = _get_qs_formats(queryset)
    settingstime_zone = get_default_timezone()
    _styles = {}  # cache of xlwt cell styles keyed by hash(fmt)
    for rownum, row in enumerate(queryset):
        sheet.write(rownum + 1, 0, rownum + 1)
        for idx, fieldname in enumerate(fields):
            fmt = formats.get(idx, 'general')
            try:
                value = get_field_value(row,
                                        fieldname,
                                        usedisplay=use_display,
                                        raw_callable=False)
                if callable(fmt):
                    # Callable formats turn the cell into a spreadsheet formula.
                    value = xlwt.Formula(fmt(value))
                if hash(fmt) not in _styles:
                    if callable(fmt):
                        _styles[hash(fmt)] = xlwt.easyxf(num_format_str='formula')
                    else:
                        _styles[hash(fmt)] = xlwt.easyxf(num_format_str=fmt)
                if isinstance(value, datetime.datetime):
                    try:
                        value = dateformat.format(value.astimezone(settingstime_zone), config['datetime_format'])
                    except ValueError:
                        # astimezone() cannot be applied to a naive datetime
                        value = dateformat.format(value, config['datetime_format'])
                if isinstance(value, (list, tuple)):
                    value = "".join(value)
                sheet.write(rownum + 1, idx + 1, value, _styles[hash(fmt)])
            except Exception as e:
                # logger.warning("TODO refine this exception: %s" % e)
                # Best-effort export: write the error text into the cell
                # instead of aborting the whole spreadsheet.
                # NOTE(review): if the exception fires before _styles[hash(fmt)]
                # was populated this line raises KeyError — confirm upstream.
                sheet.write(rownum + 1, idx + 1, smart_str(e), _styles[hash(fmt)])
    book.save(response)
    return response
0
Example 9
def object_list(request, queryset, paginate_by=None, page=None,
                allow_empty=True, template_name=None, template_loader=loader,
                extra_context=None, context_processors=None,
                template_object_name='object', mimetype=None):
    """
    Generic list of objects.

    Templates: ``<app_label>/<model_name>_list.html``
    Context:
        object_list
            list of objects
        is_paginated
            are the results paginated?
        results_per_page
            number of objects per page (if paginated)
        has_next
            is there a next page?
        has_previous
            is there a prev page?
        page
            the current page
        next
            the next page
        previous
            the previous page
        pages
            number of pages, total
        hits
            number of objects, total
        last_on_page
            the result number of the last of object in the
            object_list (1-indexed)
        first_on_page
            the result number of the first object in the
            object_list (1-indexed)
        page_range:
            A list of the page numbers (1-indexed).
    """
    # NOTE(review): indentation reconstructed from a whitespace-stripped copy;
    # the structure matches Django's legacy list_detail.object_list view.
    if extra_context is None:
        extra_context = {}
    # Clone so the caller's queryset is not affected by our evaluation.
    queryset = queryset._clone()
    if paginate_by:
        paginator = Paginator(queryset, paginate_by,
                              allow_empty_first_page=allow_empty)
        if not page:
            page = request.GET.get('page', 1)
        try:
            page_number = int(page)
        except ValueError:
            if page == 'last':
                page_number = paginator.num_pages
            else:
                # Page is not 'last', nor can it be converted to an int.
                raise Http404
        try:
            page_obj = paginator.page(page_number)
        except InvalidPage:
            raise Http404
        c = RequestContext(request, {
            '%s_list' % template_object_name: page_obj.object_list,
            'paginator': paginator,
            'page_obj': page_obj,
            'is_paginated': page_obj.has_other_pages(),
            # Legacy template context stuff. New templates should use page_obj
            # to access this instead.
            'results_per_page': paginator.per_page,
            'has_next': page_obj.has_next(),
            'has_previous': page_obj.has_previous(),
            'page': page_obj.number,
            'next': page_obj.next_page_number(),
            'previous': page_obj.previous_page_number(),
            'first_on_page': page_obj.start_index(),
            'last_on_page': page_obj.end_index(),
            'pages': paginator.num_pages,
            'hits': paginator.count,
            'page_range': paginator.page_range,
        }, context_processors)
    else:
        c = RequestContext(request, {
            '%s_list' % template_object_name: queryset,
            'paginator': None,
            'page_obj': None,
            'is_paginated': False,
        }, context_processors)
        # Only the unpaginated branch 404s on an empty queryset; the paginated
        # branch delegates that to allow_empty_first_page above.
        if not allow_empty and len(queryset) == 0:
            raise Http404
    # Callables in extra_context are evaluated lazily, at render time.
    for key, value in extra_context.items():
        if callable(value):
            c[key] = value()
        else:
            c[key] = value
    if not template_name:
        model = queryset.model
        template_name = "%s/%s_list.html" % (
            model._meta.app_label,
            model._meta.object_name.lower()
        )
    t = template_loader.get_template(template_name)
    return HttpResponse(t.render(c), mimetype=mimetype)
0
Example 10
Project: quicktill Source File: spreadsheets.py
def sessionrange(ds, start=None, end=None, tillname="Till"):
    """
    A spreadsheet summarising sessions between the start and end date.

    :param ds: SQLAlchemy session used for all queries
    :param start: optional inclusive lower bound on Session.date
    :param end: optional inclusive upper bound on Session.date
    :param tillname: used as the sheet name and the filename prefix
    :return: HttpResponse carrying an OpenDocument spreadsheet attachment
    """
    # NOTE(review): indentation reconstructed from a whitespace-stripped copy;
    # verify nesting against the upstream quicktill source.
    depts = ds.query(Department).order_by(Department.id).all()
    # One row per (session, department): the summed transaction-line amount,
    # restricted to finished sessions that have at least one recorded total.
    depttotals = ds.query(Session, Department, func.sum(
        Transline.items * Transline.amount)).\
        select_from(Session).\
        options(undefer('total')).\
        options(undefer('actual_total')).\
        filter(Session.endtime != None).\
        filter(select([func.count(SessionTotal.sessionid)],
                      whereclause=SessionTotal.sessionid == Session.id).\
            correlate(Session.__table__).as_scalar() != 0).\
        join(Transaction, Transline, Department).\
        order_by(Session.id, Department.id).\
        group_by(Session, Department)
    if start:
        depttotals = depttotals.filter(Session.date >= start)
    if end:
        depttotals = depttotals.filter(Session.date <= end)
    # NOTE(review): "OpenDocuementSpreadsheet" is presumably spelled this way
    # at its import site as well; renaming it must be done together with that
    # site, so the identifier is left untouched here.
    doc = OpenDocuementSpreadsheet()
    datestyle = dateStyle(doc)
    currencystyle = currencyStyle(doc)
    header = Style(name="ColumnHeader", family="table-cell")
    header.addElement(
        ParagraphProperties(textalign="center"))
    header.addElement(
        TextProperties(fontweight="bold"))
    doc.automaticstyles.addElement(header)

    def colwidth(w):
        # Create and register a uniquely-named column-width style; the
        # counter lives on the function object itself.
        if not hasattr(colwidth, 'num'):
            colwidth.num = 0
        colwidth.num += 1
        width = Style(name="W{}".format(colwidth.num), family="table-column")
        width.addElement(TableColumnProperties(columnwidth=w))
        doc.automaticstyles.addElement(width)
        return width

    widthshort = colwidth("2.0cm")
    widthtotal = colwidth("2.2cm")
    widthgap = colwidth("0.5cm")
    table = Table(name=tillname)
    # Session ID and date
    table.addElement(TableColumn(numbercolumnsrepeated=2, stylename=widthshort))
    # Totals
    table.addElement(TableColumn(numbercolumnsrepeated=2, stylename=widthtotal))
    # Gap
    table.addElement(TableColumn(stylename=widthgap))
    # Departments
    table.addElement(TableColumn(numbercolumnsrepeated=len(depts),
                                 stylename=widthshort))
    tr = TableRow()
    table.addElement(tr)

    def tcheader(text):
        # Bold, centred header cell.
        tc = TableCell(valuetype="string", stylename=header)
        tc.addElement(P(stylename=header, text=text))
        return tc

    tr.addElement(tcheader("ID"))
    tr.addElement(tcheader("Date"))
    tr.addElement(tcheader("Till Total"))
    tr.addElement(tcheader("Actual Total"))
    tr.addElement(TableCell())
    for d in depts:
        tr.addElement(tcheader(d.description))

    def tcint(i):
        """
        Integer table cell
        """
        return TableCell(valuetype="float", value=i)

    def tcdate(d):
        """
        Date table cell
        """
        return TableCell(valuetype="date", datevalue=d, stylename=datestyle)

    def tcmoney(m):
        """
        Money table cell
        """
        return TableCell(valuetype="currency", currency="GBP", value=str(m),
                         stylename=currencystyle)

    tr = None
    prev_s = None
    for s, d, t in depttotals:
        if s != prev_s:
            # New session: start a row, emit the fixed leading columns, and
            # reset the department iterator so column padding starts over.
            prev_s = s
            tr = TableRow()
            table.addElement(tr)
            tr.addElement(tcint(s.id))
            tr.addElement(tcdate(s.date))
            tr.addElement(tcmoney(s.total))
            tr.addElement(tcmoney(s.actual_total))
            tr.addElement(TableCell())
            di = iter(depts)
        # Emit empty cells until we reach this row's department column; relies
        # on depttotals being ordered by (Session.id, Department.id).
        while True:
            dept = next(di)
            if dept == d:
                tr.addElement(tcmoney(t))
                break
            else:
                tr.addElement(TableCell())
    doc.spreadsheet.addElement(table)
    filename = "{}-summary".format(tillname)
    if start:
        filename = filename + "-from-{}".format(start)
    if end:
        filename = filename + "-to-{}".format(end)
    filename = filename + ".ods"
    # BUG FIX: the Content-Type was misspelled "opendocuement"; the IANA
    # registered type is application/vnd.oasis.opendocument.spreadsheet.
    r = HttpResponse(content_type='application/vnd.oasis.opendocument.spreadsheet')
    r['Content-Disposition'] = 'attachment; filename={}'.format(filename)
    doc.write(r)
    return r
0
Example 11
Project: graphite Source File: views.py
def renderView(request):
    # Graphite render endpoint: parses options, consults the request cache
    # and the data cache, then returns the data in the requested format
    # (pickle/csv/json/raw) or renders an image/SVG.
    # NOTE(review): indentation reconstructed from a whitespace-stripped copy;
    # this is Python 2 code (`raise ValueError, ...` below).
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = 'noCache' not in requestOptions
    cacheTimeout = requestOptions['cacheTimeout']
    requestContext = {
        'startTime' : requestOptions['startTime'],
        'endTime' : requestOptions['endTime'],
        'localOnly' : requestOptions['localOnly'],
        'data' : []
    }
    data = requestContext['data']
    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' % (time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestKey)
    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                # "name:value" literal slices bypass target evaluation.
                try:
                    name,value = target.split(':',1)
                    value = float(value)
                except:
                    raise ValueError, "Invalid target '%s'" % target
                data.append( (name,value) )
            else:
                seriesList = evaluateTarget(requestContext, target)
                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append( (series.name, func(requestContext, series) or 0 ))
    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None
        if cachedData is not None:
            requestContext['data'] = data = cachedData
        else: # Have to actually retrieve the data now
            for target in requestOptions['targets']:
                t = time()
                seriesList = evaluateTarget(requestContext, target)
                log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
                data.extend(seriesList)
            if useCache:
                cache.set(dataKey, data, cacheTimeout)
        # If data is all we needed, we're done
        if 'pickle' in requestOptions:
            response = HttpResponse(mimetype='application/pickle')
            seriesInfo = [series.getInfo() for series in data]
            pickle.dump(seriesInfo, response, protocol=-1)
            log.rendering('Total pickle rendering time %.6f' % (time() - start))
            return response
        format = requestOptions.get('format')
        if format == 'csv':
            response = HttpResponse(mimetype='text/csv')
            writer = csv.writer(response, dialect='excel')
            for series in data:
                for i, value in enumerate(series):
                    timestamp = localtime( series.start + (i * series.step) )
                    writer.writerow( (series.name, strftime("%Y-%m-%d %H:%M:%S", timestamp), value) )
            return response
        if format == 'json':
            series_data = []
            for series in data:
                timestamps = range(series.start, series.end, series.step)
                datapoints = zip(series, timestamps)
                series_data.append( dict(target=series.name, datapoints=datapoints) )
            if 'jsonp' in requestOptions:
                # JSONP: wrap the payload in the caller-supplied callback.
                response = HttpResponse(
                    content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
                    mimetype='text/javascript')
            else:
                response = HttpResponse(content=json.dumps(series_data), mimetype='application/json')
            response['Pragma'] = 'no-cache'
            response['Cache-Control'] = 'no-cache'
            return response
        if format == 'raw':
            response = HttpResponse(mimetype='text/plain')
            for series in data:
                response.write( "%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step) )
                response.write( ','.join(map(str,series)) )
                response.write('\n')
            log.rendering('Total rawData rendering time %.6f' % (time() - start))
            return response
        if format == 'svg':
            graphOptions['outputFormat'] = 'svg'
    # We've got the data, now to render it
    graphOptions['data'] = data
    if settings.REMOTE_RENDERING: # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions['graphType'], graphOptions)
    else:
        image = doImageRender(requestOptions['graphClass'], graphOptions)
    useSVG = graphOptions.get('outputFormat') == 'svg'
    if useSVG and 'jsonp' in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
            mimetype='text/javascript')
    else:
        response = buildResponse(image, useSVG and 'image/svg+xml' or 'image/png')
    if useCache:
        cache.set(requestKey, response, cacheTimeout)
    log.rendering('Total rendering time %.6f seconds' % (time() - start))
    return response
Example 12
Project: django-cropduster Source File: __init__.py
@csrf_exempt
@login_required
def upload(request):
    # Handle an image upload: validate the form, generate a downscaled
    # "_preview" rendition, and return the crop metadata as JSON. When
    # 'standalone' is set, the image is also persisted as a StandaloneImage
    # with an associated cropduster Image and thumbnail.
    # NOTE(review): indentation reconstructed from a whitespace-stripped copy;
    # verify nesting against the upstream django-cropduster source.
    if request.method == 'GET':
        return index(request)
    # The data we'll be returning as JSON
    data = {
        'warning': [],
    }
    form = UploadForm(request.POST, request.FILES)
    if not form.is_valid():
        errors = form['image'].errors or form.errors
        return json_error(request, 'upload', action="uploading file",
            errors=[force_unicode(errors)])
    form_data = form.cleaned_data
    is_standalone = bool(form_data.get('standalone'))
    orig_file_path = form_data['image'].name
    if six.PY2 and isinstance(orig_file_path, unicode):
        orig_file_path = orig_file_path.encode('utf-8')
    orig_image = get_relative_media_url(orig_file_path)
    img = PIL.Image.open(orig_file_path)
    (w, h) = (orig_w, orig_h) = img.size
    if is_animated_gif(img) and not has_animated_gif_support():
        data['warning'].append(
            u"This server does not have animated gif support; your uploaded image "
            u"has been made static.")
    tmp_image = Image(image=orig_image)
    preview_w = form_data.get('preview_width') or PREVIEW_WIDTH
    preview_h = form_data.get('preview_height') or PREVIEW_HEIGHT
    # First pass resize if it's too large
    # NOTE(review): with true division these are floats; under Python 2
    # without `from __future__ import division` this would floor — confirm
    # the module imports division.
    resize_ratio = min(preview_w / w, preview_h / h)

    def fit_preview(im):
        # Scale the image down by resize_ratio when it exceeds the preview
        # box; images that already fit are returned unchanged.
        (w, h) = im.size
        if resize_ratio < 1:
            w = int(round(w * resize_ratio))
            h = int(round(h * resize_ratio))
            preview_img = im.resize((w, h), PIL.Image.ANTIALIAS)
        else:
            preview_img = im
        return preview_img

    if not is_standalone:
        preview_file_path = tmp_image.get_image_path('_preview')
        process_image(img, preview_file_path, fit_preview)
    data.update({
        'crop': {
            'orig_image': orig_image,
            'orig_w': orig_w,
            'orig_h': orig_h,
            'image_id': None,
        },
        'url': tmp_image.get_image_url('_preview'),
        'orig_image': orig_image,
        'orig_w': orig_w,
        'orig_h': orig_h,
        'width': w,
        'height': h,
    })
    if not is_standalone:
        return HttpResponse(json.dumps(data), content_type='application/json')
    # --- standalone path: persist the image and create a crop thumbnail ---
    size = Size('crop', w=img.size[0], h=img.size[1])
    md5 = form_data.get('md5')
    try:
        standalone_image = StandaloneImage.objects.get(md5=md5)
    except StandaloneImage.DoesNotExist:
        standalone_image = StandaloneImage(md5=md5, image=orig_image)
        standalone_image.save()
    cropduster_image, created = Image.objects.get_or_create(
        content_type=ContentType.objects.get_for_model(StandaloneImage),
        object_id=standalone_image.pk)
    if not cropduster_image.image:
        cropduster_image.image = orig_image
        cropduster_image.save()
    elif cropduster_image.image.name != orig_image:
        # An image already exists for this md5: point the response at the
        # stored copy and (re)build its preview if missing.
        data['crop']['orig_image'] = data['orig_image'] = cropduster_image.image.name
        data['url'] = cropduster_image.get_image_url('_preview')
        img = PIL.Image.open(cropduster_image.image.path)
        preview_file_path = cropduster_image.get_image_path('_preview')
        if not os.path.exists(preview_file_path):
            process_image(img, preview_file_path, fit_preview)
    thumb = cropduster_image.save_size(size, standalone=True)
    sizes = form_data.get('sizes') or []
    if len(sizes) == 1:
        size = sizes[0]
    else:
        size = Size('crop')
    data.update({
        'thumbs': [{
            'crop_x': thumb.crop_x,
            'crop_y': thumb.crop_y,
            'crop_w': thumb.crop_w,
            'crop_h': thumb.crop_h,
            'width': thumb.width,
            'height': thumb.height,
            'id': None,
            'changed': True,
            'size': json.dumps(size),
            'name': thumb.name,
        }]
    })
    data['crop'].update({
        'image_id': cropduster_image.pk,
        'sizes': json.dumps([size]),
    })
    return HttpResponse(json.dumps(data), content_type='application/json')
0
Example 13
Project: baruwa Source File: views.py
@login_required
def index(request, list_all=0, page=1, view_type='full', direction='dsc',
    order_by='timestamp', quarantine_type=None):
    """Message-list index view.

    Shows recent messages (default), or — when ``list_all`` is set — the
    full/archive/quarantine listing selected by ``view_type``, sorted by
    ``order_by``/``direction``. AJAX requests receive a JSON payload;
    normal requests are delegated to the generic ``object_list`` view.

    NOTE(review): indentation reconstructed from a whitespace-stripped copy;
    verify nesting against the upstream baruwa source.
    """
    active_filters = []
    ordering = order_by
    form = None
    num_of_recent_msgs = getattr(settings, 'BARUWA_NUM_RECENT_MESSAGES', 50)
    template_name = 'messages/index.html'
    if direction == 'dsc':
        ordering = order_by
        order_by = '-%s' % order_by
    if not list_all:
        # Recent-messages mode: optionally only messages newer than the
        # client-supplied X-Last-Timestamp header (validated by regex).
        last_ts = request.META.get('HTTP_X_LAST_TIMESTAMP', None)
        if not last_ts is None:
            last_ts = last_ts.strip()
            if not re.match(
                r'^(\d{4})\-(\d{2})\-(\d{2})(\s)(\d{2})\:(\d{2})\:(\d{2})$',
                last_ts):
                last_ts = None
        if not last_ts is None and request.is_ajax():
            message_list = Message.messages.for_user(request).values(
                'id', 'timestamp', 'from_address', 'to_address', 'subject',
                'size', 'sascore', 'highspam', 'spam', 'virusinfected',
                'otherinfected', 'whitelisted', 'blacklisted', 'nameinfected',
                'scaned').filter(timestamp__gt=last_ts)[:num_of_recent_msgs]
        else:
            message_list = Message.messages.for_user(request).values(
                'id', 'timestamp', 'from_address', 'to_address', 'subject',
                'size', 'sascore', 'highspam', 'spam', 'virusinfected',
                'otherinfected', 'whitelisted', 'blacklisted', 'nameinfected',
                'scaned')[:num_of_recent_msgs]
    else:
        if view_type == 'archive':
            message_list = Archive.messages.for_user(request).values(
                'id', 'timestamp', 'from_address', 'to_address', 'subject',
                'size', 'sascore', 'highspam', 'spam', 'virusinfected',
                'otherinfected', 'whitelisted', 'blacklisted', 'nameinfected',
                'scaned').order_by(order_by)
        elif view_type == 'quarantine':
            template_name = 'messages/quarantine.html'
            message_list = Message.quarantine.for_user(request).values(
                'id', 'timestamp', 'from_address', 'to_address', 'subject',
                'size', 'sascore', 'highspam', 'spam', 'virusinfected',
                'otherinfected', 'whitelisted', 'blacklisted', 'isquarantined',
                'nameinfected', 'scaned').order_by(order_by)
            if quarantine_type == 'spam':
                message_list = message_list.filter(spam=1)
            if quarantine_type == 'policyblocked':
                message_list = message_list.filter(spam=0)
            # Bulk-process form is paginated here so its message_id choices
            # match exactly the page being displayed.
            form = BulkQuarantineProcessForm()
            form.fields['altrecipients'].widget.attrs['size'] = '55'
            message_list = apply_filter(message_list, request, active_filters)
            p = Paginator(message_list, num_of_recent_msgs)
            if page == 'last':
                page = p.num_pages
            po = p.page(page)
            choices = [(message['id'], message['id']) for message in po.object_list]
            form.fields['message_id']._choices = choices
            form.fields['message_id'].widget.choices = choices
            request.session['quarantine_choices'] = choices
            request.session.modified = True
        else:
            message_list = Message.messages.for_user(request).values(
                'id', 'timestamp', 'from_address', 'to_address', 'subject',
                'size', 'sascore', 'highspam', 'spam', 'virusinfected',
                'otherinfected', 'whitelisted', 'blacklisted', 'nameinfected',
                'scaned').order_by(order_by)
        message_list = apply_filter(message_list, request, active_filters)
    if request.is_ajax():
        # AJAX: return messages (plus pagination metadata when listing all)
        # and current system status as JSON.
        sys_status = jsonify_status(status(request))
        if not list_all:
            message_list = map(jsonify_msg_list, message_list)
            pg = None
        else:
            p = Paginator(message_list, num_of_recent_msgs)
            if page == 'last':
                page = p.num_pages
            po = p.page(page)
            message_list = po.object_list
            message_list = map(jsonify_msg_list, message_list)
            page = int(page)
            ap = 2  # window of page numbers shown either side of current
            startp = max(page - ap, 1)
            if startp <= 3:
                startp = 1
            endp = page + ap + 1
            pn = [n for n in range(startp, endp) if n > 0 and n <= p.num_pages]
            pg = {'page': page, 'pages': p.num_pages, 'page_numbers': pn,
                'next': po.next_page_number(), 'previous': po.previous_page_number(),
                'has_next': po.has_next(), 'has_previous': po.has_previous(),
                'show_first': 1 not in pn, 'show_last': p.num_pages not in pn,
                'view_type': view_type, 'direction': direction, 'order_by': ordering,
                'quarantine_type': quarantine_type}
        json = anyjson.dumps({'items': message_list, 'paginator': pg,
            'status': sys_status})
        return HttpResponse(json, mimetype='application/javascript')
    if list_all:
        return object_list(request, template_name=template_name,
            queryset=message_list, paginate_by=num_of_recent_msgs, page=page,
            extra_context={'view_type': view_type, 'direction': direction,
            'order_by': ordering, 'active_filters': active_filters,
            'list_all': list_all, 'quarantine_type': quarantine_type,
            'quarantine_form': form},
            allow_empty=True)
    else:
        return object_list(request, template_name=template_name,
            queryset=message_list, extra_context={'view_type': view_type,
            'direction': direction, 'order_by': ordering,
            'active_filters': active_filters, 'list_all': list_all,
            'quarantine_type': quarantine_type})
0
Example 14
Project: transifex Source File: views.py
def _get_stringset(post_data, resources, language, review=False, session='', *args, **kwargs):
    """Return the source strings for the specified resources and language
    based on the filters active in the request.

    Filters are: translated|untranslated, specific user and specific
    resources, which must be a subset of the resources argument. Also, the
    user can select to search for a term, sort the columns and show more
    languages other than the selected one.

    NOTE(review): this is Python 2 code (`except X, e`, `has_key`). The
    indentation below was reconstructed from a whitespace-stripped copy —
    the nesting of the search-filter section in particular should be
    verified against the upstream transifex source.
    """
    # Find a way to determine the source language of multiple resources #FIXME
    source_language = get_source_language(resources)
    try:
        source_strings = _get_source_strings_for_request(
            post_data, resources, source_language, language,
            session
        )
    except LotteBadRequestError, e:
        logger.warning("Error in lotte filters: %s" % e.message, exc_info=True)
        return HttpResponseBadRequest()
    translated_strings = Translation.objects.filter(
        resource__in=resources,
        language=language)
    if not isinstance(source_strings, list):
        more_languages = []
        if post_data and post_data.has_key('more_languages'):
            # rsplit is used to remove the trailing ','
            more_languages = post_data.get('more_languages').rstrip(',').split(',')
        # keyword filtering
        search = post_data.get('sSearch', '')
        if not search == '':
            search, search_filter_query = get_search_filter_query(search)
            query = Q()
            for term in normalize_query(search):
                query &= Q(string__icontains=term)
                query |= Q(source_entity__string__icontains=term)
            if query:
                # Also match entities whose existing translations match.
                source_entities = translated_strings.filter(query).values('source_entity')
                query |= Q(source_entity__in=source_entities)
                source_strings = source_strings.filter(query)
            if search_filter_query:
                source_strings = source_strings.filter(search_filter_query)
        # sorting
        scols = post_data.get('iSortingCols', '0')
        for i in range(0,int(scols)):
            if post_data.has_key('iSortCol_'+str(i)):
                col = int(post_data.get('iSortCol_'+str(i)))
                if post_data.has_key('sSortDir_'+str(i)) and \
                    post_data['sSortDir_'+str(i)] == 'asc':
                    source_strings=source_strings.order_by(SORTING_DICT[col])
                else:
                    source_strings=source_strings.order_by(SORTING_DICT[col]).reverse()
        # for statistics
        total = source_strings.count()
    else:
        # NOTE(review): in this branch `more_languages` is never assigned but
        # is referenced in the comprehension below — potential NameError.
        total = 0
    # for items displayed
    try:
        dlength = int(post_data.get('iDisplayLength','50'))
        dstart = int(post_data.get('iDisplayStart','0'))
    except ValueError, e:
        return HttpResponseBadRequest()
    # NOTE: It's important to keep the translation string matching inside this
    # iteration to prevent extra un-needed queries. In this iteration only the
    # strings displayed are calculated, saving a lot of resources.
    response_dict = {
        'sEcho': post_data.get('sEcho','1'),
        'iTotalRecords': total,
        'iTotalDisplayRecords': total,
        'aaData': [
            [
                # 1. Translation object's "id"
                s.id,
                # 2. SourceEntity object's "string" content
                s.source_entity.string,
                # 3. Get all the necessary source strings, including plurals and
                # similar langs, all in a dictionary (see also below)
                _get_source_strings(s, source_language, language.code, more_languages),
                # 4. Get all the Translation strings mapped with plural rules
                # in a single dictionary (see docstring of function)
                _get_strings(translated_strings, language, s.source_entity),
                # 5. A number which indicates the number of Suggestion objects
                # attached to this row of the table.
                Suggestion.objects.filter(source_entity=s.source_entity, language__code=language.code).count(),
                # 6. save buttons and hidden context (ready to inject snippet)
                # It includes the following content, wrapped in span tags:
                # * SourceEntity object's "context" value
                # * SourceEntity object's "id" value
                ('<span class="save edit-panel inactive" id="save_' + str(counter) + '" style="border:0" title="' + _("Save the specific change") + '"></span>'
                 '<span class="spellcheck edit-panel inactive" id="spellcheck_' + str(counter) + '" style="border:0" title="' + _("Check spelling") + '"></span>'
                 '<span class="undo edit-panel inactive" id="undo_' + str(counter) + '" style="border:0" title="' + _("Undo to initial text") + '"></span>'
                 '<span class="context" id="context_' + str(counter) + '" style="display:none;">' + escape(str(s.source_entity.context_string.encode('UTF-8'))) + '</span>'
                 '<span class="source_id" id="sourceid_' + str(counter) + '"style="display:none;">' + str(s.source_entity.id) + '</span>'),
            ] for counter,s in enumerate(source_strings[dstart:dstart+dlength])
        ],
    }
    if review:
        # Append a per-row review checkbox; checked state mirrors the rule-5
        # translation's `reviewed` flag when such a translation exists.
        for counter, s in enumerate(source_strings[dstart:dstart+dlength]):
            try:
                translation = Translation.objects.get(
                    source_entity__id=s.source_entity.id,
                    language__code=language.code, rule=5
                )
                review_snippet = '<span><input class="review-check" title="' + _("Reviewed string") + '" id="review_source_' + str(s.source_entity.id) + '" type="checkbox" name="review" ' + ('checked="checked"' if translation.reviewed else '') + ' value="Review"/></span>',
            except Translation.DoesNotExist:
                review_snippet = '<span><input class="review-check" title="' + _("Reviewed string") + '" id="review_source_' + str(s.source_entity.id) + '" type="checkbox" name="review" disabled="disabled" value="Review"/></span>',
            response_dict['aaData'][counter].append(review_snippet)
    json = simplejson.dumps(response_dict)
    return HttpResponse(json, mimetype='application/json')
0
Example 15
def twilio_view(f):
    """
    This decorator provides several helpful shortcuts for writing Twilio views.

    - It ensures that only requests from Twilio are passed through. This
      helps protect you from forged requests.
    - It ensures your view is exempt from CSRF checks via Django's
      @csrf_exempt decorator. This is necessary for any view that accepts
      POST requests from outside the local domain (eg: Twilio's servers).
    - It enforces the blacklist. If you've got any ``Caller``s who are
      blacklisted, any requests from them will be rejected.
    - It allows your view to (optionally) return TwiML to pass back to
      Twilio's servers instead of building an ``HttpResponse`` object
      manually.
    - It allows your view to (optionally) return any ``twilio.Verb`` object
      instead of building a ``HttpResponse`` object manually.

    .. note::
        The forgery protection checks ONLY happen if ``settings.DEBUG =
        False`` (aka, your site is in production).

    Usage::

        from twilio import twiml

        @twilio_view
        def my_view(request):
            r = twiml.Response()
            r.message('Thanks for the SMS message!')
            return r
    """
    @csrf_exempt
    @wraps(f)
    def decorator(request_or_self, *args, **kwargs):
        # Works both as a function-view decorator (first arg is the request)
        # and on class-based view methods (first arg is self, request follows).
        class_based_view = not isinstance(request_or_self, HttpRequest)
        if not class_based_view:
            request = request_or_self
        else:
            assert len(args) >= 1
            request = args[0]

        # Turn off Twilio authentication when explicitly requested, or
        # in debug mode. Otherwise things do not work properly. For
        # more information, see the docs.
        use_forgery_protection = getattr(
            settings,
            'DJANGO_TWILIO_FORGERY_PROTECTION',
            not settings.DEBUG,
        )
        if use_forgery_protection:
            if request.method not in ['GET', 'POST']:
                # BUG FIX: HttpResponseNotAllowed expects the list of
                # *permitted* methods (used to build the Allow header), not
                # the offending request method.
                return HttpResponseNotAllowed(['GET', 'POST'])

            # Forgery check: validate the X-Twilio-Signature header against
            # the request URL and parameters.
            try:
                validator = RequestValidator(TWILIO_AUTH_TOKEN)
                url = request.build_absolute_uri()
                signature = request.META['HTTP_X_TWILIO_SIGNATURE']
            except (AttributeError, KeyError):
                return HttpResponseForbidden()

            if request.method == 'POST':
                if not validator.validate(url, request.POST, signature):
                    return HttpResponseForbidden()
            if request.method == 'GET':
                if not validator.validate(url, request.GET, signature):
                    return HttpResponseForbidden()

        # Blacklist check, by default is true
        check_blacklist = getattr(
            settings,
            'DJANGO_TWILIO_BLACKLIST_CHECK',
            True
        )
        if check_blacklist:
            blacklisted_resp = get_blacklisted_response(request)
            if blacklisted_resp:
                return blacklisted_resp

        response = f(request_or_self, *args, **kwargs)

        if isinstance(response, (text_type, bytes)):
            # Raw TwiML string/bytes from the view: wrap as an XML response.
            return HttpResponse(response, content_type='application/xml')
        elif isinstance(response, Verb):
            # twilio Verb object: serialize it to TwiML.
            return HttpResponse(str(response), content_type='application/xml')
        else:
            return response
    return decorator
0
Example 16
Project: daywatch Source File: views.py
@catch_error
@log_activity
@login_required
@permission_required
def history_listings_div(request):
#prepare the params
context = get_status(request)
form = HistoryPanelExportForm(user=request.user, data=request.GET)
if form.is_valid():
request.session['form_session'] = form.cleaned_data
period = form.cleaned_data['period']
style = get_style()
style_ref = get_style_ref(style)
user = request.user
# Convert date parameters
end_date = datetime.now()
if period == 'last_30_d':
start_date = datetime.now() - timedelta(days=30)
elif period == 'last_15_d':
start_date = datetime.now() - timedelta(days=15)
elif period == 'last_7_d':
start_date = datetime.now() - timedelta(days=7)
elif period == 'custom':
d = form.cleaned_data['start_date']
start_date = datetime(d.year, d.month, d.day)
d = form.cleaned_data['end_date']
end_date = datetime(d.year, d.month, d.day, 23, 59)
country = form.cleaned_data['country']
context['use_local_currency'] = country in LOCAL_CURRENCY_COUNTRIES
context['local_currency'] = CURRENCY_DICT[country]
history_limit = 0
out_of_range_error = False
out_of_range_warning = False
if not user.has_full_access_for_country(country):
if user.week_history_limit > 0:
#user is history limited, limit start and end dates
week_limit = user.week_history_limit
history_limit = datetime.now() - timedelta(weeks=week_limit)
if end_date < history_limit:
out_of_range_error = True
elif start_date < history_limit:
start_date = history_limit
out_of_range_warning = True
history_limit = history_limit.date()
# Get deals for this query
if not out_of_range_error:
player_ids = form.cleaned_data['players']
player_ids = [int(p_id) for p_id in player_ids]
if form.cleaned_data['all_categories']:
items = DayWatchItem.objects.filter(
site__id__in=player_ids,
date_time__gte=start_date,
date_time__lte=end_date
)
categories = Category.objects.all()
category_ids = []
for category in categories:
if category.name != 'root':
category_ids.append(category.id)
else:
category_ids = form.cleaned_data['categories']
category_ids = [int(c_id) for c_id in category_ids]
items = DayWatchItem.objects.filter(
site__id__in=player_ids,
category__id__in=category_ids,
date_time__gte=start_date,
date_time__lte=end_date
)
else:
items = DayWatchItem.objects.none()
# Prepare and return results to upper layers
context['items'] = items
context['country'] = country
context['style_ref'] = style_ref
context['history_limit'] = history_limit
context['out_of_range_error'] = out_of_range_error
context['out_of_range_warning'] = out_of_range_warning
# excel button clicked
if form.data.get('excel'):
if not request.user.premium_access:
msg = " Sorry, Excel exports are limited to Premium Users."
return warningResponse(request, _(msg))
if not user.is_staff:
# We limit exportable deals to a month and a half from today
floor_date = datetime.now() - timedelta(weeks=7)
context['items'] = context['items'].filter(
start_date_time__gte=floor_date)
if start_date < floor_date:
context['floor_date_warn'] = floor_date
filename = "DayWatch_report_%s" % (
datetime.now().strftime("%d-%m-%Y_%H-%M"),
)
result = render_to_string(
'includes/history_table_xls.html',
context,
context_instance=RequestContext(request)
)
response = HttpResponse(
result,
content_type='application/vnd.ms-excel;charset=utf-8'
)
content_disposition = 'attachment; filename="%s.xls"' % (filename,)
response['Content-Disposition'] = content_disposition
return response
# Normal results rendering
# col_index_name_map is required for correct sorting behavior
index_name_map = {
0: 'offer',
1: 'company',
2: 'start_date_time',
3: 'end_date_time',
4: 'price',
5: 'price_usd',
6: 'discount',
7: 'category',
8: 'is_main_deal',
9: 'sold_count',
10: 'total_sales_usd',
11: 'merchant_name',
}
if context['use_local_currency']:
index_name_map[10] = 'total_sales_local'
json_template = 'includes/history_table_json.txt'
return get_datatables_records(
request, context['items'],
index_name_map, context, json_template
)
0
Example 17
Project: syndicate Source File: decorators.py
def precheck(g_type, redirect_view):
    '''
    Build a decorator that verifies a gateway exists and that the correct
    gateway password was POSTed, before running a gateway-modifying view.

    Arguments:
    + g_type is the type of gateway, either 'AG', 'UG' or 'RG'
    + redirect_view is the location to be redirected to on validation failure

    All wrapped view functions need to take the following args:
    - request
    - g_id

    Returns the type-specific decorator, or (for an unknown g_type) a
    redirect to '/syn/home', mirroring the original error handling.
    '''
    # Per-type configuration:
    #   (gateway reader, authenticator, POST-bounce view name, human label)
    # The readers/authenticators are wrapped in lambdas so the db/AG/UG/RG
    # globals are only resolved when a wrapped view actually runs, exactly
    # as in the original triplicated implementation.
    gateway_config = {
        'AG': (lambda g_id: db.read_acquisition_gateway(g_id),
               lambda g, password: AG.authenticate(g, password),
               'django_ag.views.viewgateway',
               'acquisition gateway'),
        'UG': (lambda g_id: db.read_user_gateway(g_id),
               lambda g, password: UG.authenticate(g, password),
               'django_ug.views.viewgateway',
               'user gateway'),
        'RG': (lambda g_id: db.read_replica_gateway(g_id),
               lambda g, password: RG.authenticate(g, password),
               'django_rg.views.viewgateway',
               'replica gateway'),
    }

    try:
        read_gateway, authenticate, view_name, label = gateway_config[g_type]
    except KeyError:
        # Typo fixed: original message read "Gatway".
        logging.error("Gateway type argument %s for decorators.precheck doesn't exist." % g_type)
        return redirect('/syn/home')

    def gateway_precheck(f):
        def wrapper(request, g_id):
            # Gateway views are POST-only; bounce other requests back to
            # the gateway's view page.
            if not request.POST:
                return redirect(view_name, g_id=g_id)

            session = request.session
            username = session['login_email']

            try:
                g = read_gateway(g_id)
                if not g:
                    raise Exception("No gateway exists.")
            except Exception as e:
                logging.error("Error reading gateway %s : Exception: %s" % (g_id, e))
                message = "No %s by the name of %s exists." % (label, g_id)
                t = loader.get_template("gateway_templates/viewgateway_failure.html")
                c = Context({'message': message, 'username': username})
                return HttpResponse(t.render(c))

            form = libforms.Password(request.POST)
            if not form.is_valid():
                session['message'] = "Password required."
                return redirect(redirect_view, g_id)

            # Check password hash
            if not authenticate(g, form.cleaned_data['password']):
                session['message'] = "Incorrect password."
                return redirect(redirect_view, g_id)

            return f(request, g_id)
        return wrapper
    return gateway_precheck
0
Example 18
Project: wger Source File: plan.py
def export_pdf(request, id, uidb64=None, token=None):
    '''
    Generates a PDF with the contents of a nutrition plan

    :param id: primary key of the NutritionPlan to export
    :param uidb64: optional base64-encoded user id for tokenized (no-login) access
    :param token: optional signed token matching uidb64

    See also
    * http://www.blog.pythonlibrary.org/2010/09/21/reportlab
    * http://www.reportlab.com/apis/reportlab/dev/platypus.html
    '''

    # Load the plan
    if uidb64 is not None and token is not None:
        # Token access path: the plan can be fetched without a login, but
        # only when the signed token validates against the user id.
        if check_token(uidb64, token):
            plan = get_object_or_404(NutritionPlan, pk=id)
        else:
            return HttpResponseForbidden()
    else:
        # Regular access path: the plan must belong to the logged-in user.
        if request.user.is_anonymous():
            return HttpResponseForbidden()
        plan = get_object_or_404(NutritionPlan, pk=id, user=request.user)

    plan_data = plan.get_nutritional_values()

    # Create the HttpResponse object with the appropriate PDF headers.
    # The PDF is written directly into the response body below.
    response = HttpResponse(content_type='application/pdf')

    # Create the PDF object, using the response object as its "file."
    doc = SimpleDocTemplate(response,
                            pagesize=A4,
                            title=_('Nutrition plan'),
                            author='wger Workout Manager',
                            subject=_('Nutritional plan %s') % request.user.username)

    # Background colour for header
    # Reportlab doesn't use the HTML hexadecimal format, but has a range of
    # 0 till 1, so we have to convert here.
    header_colour = colors.Color(int('73', 16) / 255.0,
                                 int('8a', 16) / 255.0,
                                 int('5f', 16) / 255.0)

    # container for the 'Flowable' objects
    elements = []
    data = []

    # Iterate through the Plan
    # The marker lists record row indices so table styles (heading
    # background, spans) can be applied after the data is assembled.
    meal_markers = []
    ingredient_markers = []

    # Meals
    i = 0
    for meal in plan.meal_set.select_related():
        i += 1

        meal_markers.append(len(data))

        if not meal.time:
            p = Paragraph(u'<para align="center"><strong>{nr} {meal_nr}</strong></para>'
                          .format(nr=_('Nr.'), meal_nr=i),
                          styleSheet["Normal"])
        else:
            p = Paragraph(u'<para align="center"><strong>'
                          u'{nr} {meal_nr} - {meal_time}'
                          u'</strong></para>'
                          .format(nr=_('Nr.'), meal_nr=i, meal_time=meal.time.strftime("%H:%M")),
                          styleSheet["Normal"])
        data.append([p])

        # Ingredients
        for item in meal.mealitem_set.select_related():
            ingredient_markers.append(len(data))

            p = Paragraph(u'<para>{0}</para>'.format(item.ingredient.name), styleSheet["Normal"])
            # Amounts are either grams or a named weight unit
            if item.get_unit_type() == MEALITEM_WEIGHT_GRAM:
                unit_name = 'g'
            else:
                unit_name = ' ' + item.weight_unit.unit.name

            data.append([Paragraph(u"{0}{1}".format(item.amount, unit_name), styleSheet["Normal"]),
                         p])

    # Set general table styles
    table_style = []

    # Set specific styles, e.g. background for title cells
    for marker in meal_markers:
        # Set background colour for headings
        table_style.append(('BACKGROUND', (0, marker), (-1, marker), header_colour))
        table_style.append(('BOX', (0, marker), (-1, marker), 1.25, colors.black))

        # Make the headings span the whole width
        table_style.append(('SPAN', (0, marker), (-1, marker)))

    # has the plan any data?
    if data:
        t = Table(data, style=table_style)

        # Manually set the width of the columns
        t._argW[0] = 2.5 * cm

    # There is nothing to output
    else:
        t = Paragraph(_('<i>This is an empty plan, what did you expect on the PDF?</i>'),
                      styleSheet["Normal"])

    # Set the title (if available)
    if plan.description:
        p = Paragraph('<para align="center"><strong>%(description)s</strong></para>' %
                      {'description': plan.description},
                      styleSheet["Bold"])
        elements.append(p)

        # Filler
        elements.append(Spacer(10 * cm, 0.5 * cm))

    # append the table to the document
    elements.append(t)
    elements.append(Paragraph('<para> </para>', styleSheet["Normal"]))

    # Create table with nutritional calculations
    # Layout: title row, header row, then one row per nutrient; single-value
    # rows (energy, sugar, etc.) get spanned across the remaining columns.
    data = []
    data.append([Paragraph(u'<para align="center">{0}</para>'.format(_('Nutritional data')),
                           styleSheet["Bold"])])
    data.append([Paragraph(_('Macronutrients'), styleSheet["Normal"]),
                 Paragraph(_('Total'), styleSheet["Normal"]),
                 Paragraph(_('Percent of energy'), styleSheet["Normal"]),
                 Paragraph(_('g per body kg'), styleSheet["Normal"])])
    data.append([Paragraph(_('Energy'), styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['total']['energy']), styleSheet["Normal"])])
    data.append([Paragraph(_('Protein'), styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['total']['protein']), styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['percent']['protein']), styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['per_kg']['protein']), styleSheet["Normal"])])
    data.append([Paragraph(_('Carbohydrates'), styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['total']['carbohydrates']),
                           styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['percent']['carbohydrates']),
                           styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['per_kg']['carbohydrates']),
                           styleSheet["Normal"])])
    data.append([Paragraph(_('Sugar content in carbohydrates'), styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['total']['carbohydrates_sugar']),
                           styleSheet["Normal"])])
    data.append([Paragraph(_('Fat'), styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['total']['fat']), styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['percent']['fat']), styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['per_kg']['fat']), styleSheet["Normal"])])
    data.append([Paragraph(_('Saturated fat content in fats'), styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['total']['fat_saturated']),
                           styleSheet["Normal"])])
    data.append([Paragraph(_('Fibres'), styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['total']['fibres']), styleSheet["Normal"])])
    data.append([Paragraph(_('Sodium'), styleSheet["Normal"]),
                 Paragraph(six.text_type(plan_data['total']['sodium']), styleSheet["Normal"])])

    table_style = []
    table_style.append(('BOX', (0, 0), (-1, -1), 1.25, colors.black))
    table_style.append(('GRID', (0, 0), (-1, -1), 0.40, colors.black))
    table_style.append(('SPAN', (0, 0), (-1, 0)))  # Title
    table_style.append(('SPAN', (1, 2), (-1, 2)))  # Energy
    table_style.append(('SPAN', (1, 5), (-1, 5)))  # Sugar
    table_style.append(('SPAN', (1, 7), (-1, 7)))  # Saturated fats
    table_style.append(('SPAN', (1, 8), (-1, 8)))  # Fibres
    table_style.append(('SPAN', (1, 9), (-1, 9)))  # Sodium
    t = Table(data, style=table_style)
    t._argW[0] = 5 * cm
    elements.append(t)

    # Footer, date and info
    elements.append(Spacer(10 * cm, 0.5 * cm))
    created = datetime.date.today().strftime("%d.%m.%Y")
    url = reverse('nutrition:plan:view', kwargs={'id': plan.id})
    p = Paragraph('''<para align="left">
                  %(date)s -
                  <a href="%(url)s">%(url)s</a> -
                  %(created)s
                  %(version)s
                  </para>''' %
                  {'date': _("Created on the <b>%s</b>") % created,
                   'created': "wger Workout Manager",
                   'version': get_version(),
                   'url': request.build_absolute_uri(url), },
                  styleSheet["Normal"])
    elements.append(p)
    doc.build(elements)

    response['Content-Disposition'] = 'attachment; filename=nutritional-plan.pdf'
    response['Content-Length'] = len(response.content)
    return response
0
Example 19
def condition(etag_func=None, last_modified_func=None):
    """
    Decorator to support conditional retrieval (or change) for a view
    function.

    The parameters are callables to compute the ETag and last modified time for
    the requested resource, respectively. The callables are passed the same
    parameters as the view itself. The Etag function should return a string (or
    None if the resource doesn't exist), whilst the last_modified function
    should return a datetime object (or None if the resource doesn't exist).

    If both parameters are provided, all the preconditions must be met before
    the view is processed.

    This decorator will either pass control to the wrapped view function or
    return an HTTP 304 response (unmodified) or 412 response (preconditions
    failed), depending upon the request method.

    Any behavior marked as "undefined" in the HTTP spec (e.g. If-none-match
    plus If-modified-since headers) will result in the view function being
    called.
    """
    def decorator(func):
        @wraps(func, assigned=available_attrs(func))
        def inner(request, *args, **kwargs):
            # Get HTTP request headers
            if_modified_since = request.META.get("HTTP_IF_MODIFIED_SINCE")
            if if_modified_since:
                # Parsed into a Unix timestamp; None if unparseable.
                if_modified_since = parse_http_date_safe(if_modified_since)
            if_none_match = request.META.get("HTTP_IF_NONE_MATCH")
            if_match = request.META.get("HTTP_IF_MATCH")
            if if_none_match or if_match:
                # There can be more than one ETag in the request, so we
                # consider the list of values.
                try:
                    etags = parse_etags(if_none_match or if_match)
                except ValueError:
                    # In case of invalid etag ignore all ETag headers.
                    # Apparently Opera sends invalidly quoted headers at times
                    # (we should be returning a 400 response, but that's a
                    # little extreme) -- this is Django bug #10681.
                    if_none_match = None
                    if_match = None

            # Compute values (if any) for the requested resource.
            if etag_func:
                res_etag = etag_func(request, *args, **kwargs)
            else:
                res_etag = None
            if last_modified_func:
                dt = last_modified_func(request, *args, **kwargs)
                if dt:
                    # Convert to a Unix timestamp to compare with the
                    # parsed If-Modified-Since value above.
                    res_last_modified = timegm(dt.utctimetuple())
                else:
                    res_last_modified = None
            else:
                res_last_modified = None

            response = None
            if not ((if_match and (if_modified_since or if_none_match)) or
                    (if_match and if_none_match)):
                # We only get here if no undefined combinations of headers are
                # specified.
                if ((if_none_match and (res_etag in etags or
                                        "*" in etags and res_etag)) and
                        (not if_modified_since or
                         (res_last_modified and if_modified_since and
                          res_last_modified <= if_modified_since))):
                    # If-None-Match matched: 304 for safe methods,
                    # 412 for state-changing ones.
                    if request.method in ("GET", "HEAD"):
                        response = HttpResponseNotModified()
                    else:
                        logger.warning('Precondition Failed: %s', request.path,
                                       extra={
                                           'status_code': 412,
                                           'request': request
                                       }
                                       )
                        response = HttpResponse(status=412)
                elif if_match and ((not res_etag and "*" in etags) or
                                   (res_etag and res_etag not in etags)):
                    # If-Match failed: the resource's current ETag does not
                    # match any the client holds.
                    logger.warning('Precondition Failed: %s', request.path,
                                   extra={
                                       'status_code': 412,
                                       'request': request
                                   }
                                   )
                    response = HttpResponse(status=412)
                elif (not if_none_match and request.method == "GET" and
                        res_last_modified and if_modified_since and
                        res_last_modified <= if_modified_since):
                    # Plain If-Modified-Since check (only when no ETag
                    # precondition was given).
                    response = HttpResponseNotModified()

            if response is None:
                response = func(request, *args, **kwargs)

            # Set relevant headers on the response if they don't already exist.
            if res_last_modified and not response.has_header('Last-Modified'):
                response['Last-Modified'] = http_date(res_last_modified)
            if res_etag and not response.has_header('ETag'):
                response['ETag'] = quote_etag(res_etag)

            return response
        return inner
    return decorator
0
Example 20
Project: explorer Source File: views.py
@csrf_exempt
def address_webhook(request, secret_key, ignored_key):
    '''
    Process an inbound webhook from blockcypher

    Records the webhook, upserts the corresponding OnChainTransaction,
    and (subject to rate limiting and user notification preferences)
    sends transaction notification emails.
    '''
    # Log webhook
    webhook = WebHook.log_webhook(request, WebHook.BLOCKCYPHER_ADDRESS_NOTIFICATION)

    # NOTE(review): `assert` statements are stripped under `python -O`,
    # so these validations would silently pass in optimized mode --
    # consider explicit checks that raise/return instead.
    assert secret_key == WEBHOOK_SECRET_KEY
    assert request.method == 'POST', 'Request has no post'

    blockcypher_id = request.META.get('HTTP_X_EVENTID')
    assert 'tx-confirmation' == request.META.get('HTTP_X_EVENTTYPE')

    payload = json.loads(request.body.decode())
    address_subscription = AddressSubscription.objects.get(blockcypher_id=blockcypher_id)

    tx_hash = payload['hash']
    num_confs = payload['confirmations']
    double_spend = payload['double_spend']
    satoshis_sent = payload['total']
    fee_in_satoshis = payload['fees']

    tx_event = get_object_or_None(
        OnChainTransaction,
        tx_hash=tx_hash,
        address_subscription=address_subscription,
    )
    if tx_event:
        # Known transaction: just refresh the confirmation state.
        tx_is_new = False
        tx_event.num_confs = num_confs
        tx_event.double_spend = double_spend
        tx_event.save()
    else:
        # First time we see this tx: classify it as deposit and/or
        # withdrawal by checking whether the subscribed address appears
        # among the tx inputs/outputs, then persist it.
        tx_is_new = True
        input_addresses = set()
        for input_entry in payload['inputs']:
            for address in input_entry.get('addresses', []):
                input_addresses.add(address)
        if address_subscription.b58_address in input_addresses:
            is_withdrawal = True
        else:
            is_withdrawal = False
        output_addresses = set()
        for output_entry in payload.get('outputs', []):
            for address in output_entry['addresses']:
                output_addresses.add(address)
        if address_subscription.b58_address in output_addresses:
            is_deposit = True
        else:
            is_deposit = False
        tx_event = OnChainTransaction.objects.create(
            tx_hash=tx_hash,
            address_subscription=address_subscription,
            num_confs=num_confs,
            double_spend=double_spend,
            satoshis_sent=satoshis_sent,
            fee_in_satoshis=fee_in_satoshis,
            is_deposit=is_deposit,
            is_withdrawal=is_withdrawal,
        )

    # email sending logic
    # TODO: add logic for notify on deposit vs withdrawal
    # TODO: add safety check to prevent duplicate email sending
    if tx_event.address_subscription.unsubscribed_at or tx_event.address_subscription.disabled_at:
        # unsubscribe from webhooks going forward
        try:
            unsub_result = unsubscribe_from_webhook(
                webhook_id=tx_event.address_subscription.blockcypher_id,
                api_key=BLOCKCYPHER_API_KEY,
                coin_symbol=tx_event.address_subscription.coin_symbol,
            )
            assert unsub_result is True, unsub_result
        except Exception:
            # there was a problem unsubscribing
            # notify using sentry but still return the webhook to blockcypher
            client.captureException()
    elif tx_event.address_subscription.auth_user.email_verified:
        # make sure we haven't contacted too many times (and unsub if so)
        earliest_dt = now() - timedelta(days=3)
        recent_emails_sent = SentEmail.objects.filter(
            address_subscription=tx_event.address_subscription,
            sent_at__gt=earliest_dt,
        ).count()
        if recent_emails_sent > 100:
            # too many emails, unsubscribe
            tx_event.address_subscription.admin_unsubscribe_subscription()
            client.captureMessage('TX Event %s unsubscribed' % tx_event.id)
            # TODO: notify user they've been unsubscribed
        else:
            # proceed with normal email sending
            if double_spend and (tx_is_new or not tx_event.double_spend):
                # We have the first reporting of a double-spend
                tx_event.send_double_spend_tx_notification()
            elif num_confs == 0 and tx_is_new:
                # First broadcast
                if tx_event.address_subscription.notify_on_broadcast:
                    if tx_event.is_deposit and tx_event.address_subscription.notify_on_deposit:
                        tx_event.send_unconfirmed_tx_email()
                    elif tx_event.is_withdrawal and tx_event.address_subscription.notify_on_withdrawal:
                        tx_event.send_unconfirmed_tx_email()
            elif num_confs == 6:
                # Sixth confirm
                if tx_event.address_subscription.notify_on_sixth_confirm:
                    if tx_event.is_deposit and tx_event.address_subscription.notify_on_deposit:
                        tx_event.send_confirmed_tx_email()
                    elif tx_event.is_withdrawal and tx_event.address_subscription.notify_on_withdrawal:
                        tx_event.send_confirmed_tx_email()
    else:
        # active subscription with unverified email (can't contact)
        # TODO: add unsub if orig subscription is > X days old
        # eventually these could pile up
        pass

    # Update logging
    webhook.address_subscription = address_subscription
    webhook.succeeded = True
    webhook.save()

    # Return something
    return HttpResponse("*ok*")
0
Example 21
Project: tendenci Source File: views.py
@is_enabled('files')
def details(request, id, size=None, crop=False, quality=90, download=False, constrain=False, template_name="files/details.html"):
    """
    Return an image (or file) response after parameters have been applied.

    :param size: 'WxH' string used to resize images
    :param crop: whether to crop the resized image
    :param quality: JPEG quality (int or digit string)
    :param download: serve as attachment and log a download event
    :param constrain: constrain the resize to the aspect ratio
    """
    def _remember_cache_key(file_pk, key):
        # Track `key` in the per-file cache group so every cached rendition
        # of this file can be invalidated together later.
        cache_group_key = "files_cache_set.%s" % file_pk
        cache_group_list = cache.get(cache_group_key)
        if cache_group_list is None:
            cache.set(cache_group_key, [key])
        else:
            # BUGFIX: one call site used `cache_group_list += cache_key`,
            # which extended the list with the individual characters of the
            # key string instead of appending the key itself.
            cache_group_list.append(key)
            cache.set(cache_group_key, cache_group_list)

    cache_key = generate_image_cache_key(
        file=id,
        size=size,
        pre_key=FILE_IMAGE_PRE_KEY,
        crop=crop,
        unique_key=id,
        quality=quality,
        constrain=constrain)
    cached_image = cache.get(cache_key)
    if cached_image:
        # Serve the previously generated rendition via redirect.
        return redirect('%s%s' % (get_setting('site', 'global', 'siteurl'), cached_image))

    file = get_object_or_404(File, pk=id)

    # basic permissions
    if not has_view_perm(request.user, 'files.view_file', file):
        raise Http403

    # extra permission: private files require an authenticated user
    if not file.is_public:
        if not request.user.is_authenticated():
            raise Http403

    # if string and digit convert to integer
    if isinstance(quality, basestring) and quality.isdigit():
        quality = int(quality)

    # get image binary
    try:
        data = file.file.read()
        file.file.close()
    except IOError:  # no such file or directory
        raise Http404

    if download:  # log download
        attachment = u'attachment;'
        EventLog.objects.log(**{
            'event_id': 185000,
            # Typo fixed: was "dowloaded"
            'event_data': '%s %s (%d) downloaded by %s' % (file.type(), file._meta.object_name, file.pk, request.user),
            'description': '%s downloaded' % file._meta.object_name,
            'user': request.user,
            'request': request,
            'instance': file,
        })
    else:  # log view (images are viewed too often to log each view)
        attachment = u''
        if file.type() != 'image':
            EventLog.objects.log(**{
                'event_id': 186000,
                'event_data': '%s %s (%d) viewed by %s' % (file.type(), file._meta.object_name, file.pk, request.user),
                'description': '%s viewed' % file._meta.object_name,
                'user': request.user,
                'request': request,
                'instance': file,
            })

    # if image size specified, serve a resized rendition
    if file.type() == 'image' and size:
        if file.ext() in ('.tif', '.tiff'):
            raise Http404  # tifs cannot (currently) be viewed via browsers
        size = [int(s) if s.isdigit() else 0 for s in size.split('x')]
        size = aspect_ratio(file.image_dimensions(), size, constrain)

        # check for dimensions greater than zero
        if not all(size):
            raise Http404

        # gets resized image from cache or rebuilds
        image = get_image(file.file, size, FILE_IMAGE_PRE_KEY, cache=True, crop=crop, quality=quality, unique_key=None)
        response = HttpResponse(content_type=file.mime_type())
        response['Content-Disposition'] = '%s filename=%s' % (attachment, file.get_name())

        params = {'quality': quality}
        if image.format == 'GIF':
            params['transparency'] = 0
        image.save(response, image.format, **params)

        if file.is_public_file():
            # Persist the rendition so future requests hit the cache redirect.
            file_name = "%s%s" % (file.get_name(), ".jpg")
            file_path = 'cached%s%s' % (request.path, file_name)
            default_storage.delete(file_path)
            default_storage.save(file_path, ContentFile(response.content))
            full_file_path = "%s%s" % (settings.MEDIA_URL, file_path)
            cache.set(cache_key, full_file_path)
            _remember_cache_key(file.pk, cache_key)
        return response

    if file.is_public_file():
        cache.set(cache_key, file.get_file_public_url())
        set_s3_file_permission(file.file, public=True)
        _remember_cache_key(file.pk, cache_key)

    # set mimetype
    if file.mime_type():
        response = HttpResponse(data, content_type=file.mime_type())
    else:
        raise Http404

    # return response
    if file.get_name().endswith(file.ext()):
        response['Content-Disposition'] = '%s filename=%s' % (attachment, file.get_name())
    else:
        response['Content-Disposition'] = '%s filename=%s' % (attachment, file.get_name_ext())
    return response
0
Example 22
Project: tendenci Source File: views.py
@login_required
def template_view(request, template_id, render=True):
    """
    Generate newsletter preview
    Combine template with context passed via GET

    :param template_id: identifier of the NewsletterTemplate to preview
    :param render: when True, return the rendered newsletter HTML directly;
                   otherwise wrap the content in newsletters/content.html
    """
    template = get_object_or_404(NewsletterTemplate, template_id=template_id)
    if not template.html_file:
        raise Http404
    if not has_perm(request.user, 'newsletters.view_newslettertemplate'):
        raise Http403

    simplified = True

    login_content = ""
    include_login = int(request.GET.get('include_login', 0))
    if include_login:
        login_content = render_to_string('newsletters/login.txt',
                                         context_instance=RequestContext(request))

    jumplink_content = ""
    jump_links = int(request.GET.get('jump_links', 1))
    if jump_links:
        jumplink_content = render_to_string('newsletters/jumplinks.txt', locals(),
                                            context_instance=RequestContext(request))

    # Each content section below is toggled via a GET flag and limited to
    # the last N days via a matching *_days GET parameter.
    art_content = ""
    articles = int(request.GET.get('articles', 1))
    articles_days = request.GET.get('articles_days', 60)
    if articles:
        articles_list, articles_content = newsletter_articles_list(request, articles_days, simplified)
    else:
        articles_list = []
        articles_content = []

    news_content = ""
    news = int(request.GET.get('news', 1))
    news_days = request.GET.get('news_days', 30)
    if news:
        news_list, news_content = newsletter_news_list(request, news_days, simplified)
    else:
        news_list = []
        news_content = []

    jobs_content = ""
    jobs = int(request.GET.get('jobs', 1))
    jobs_days = request.GET.get('jobs_days', 30)
    if jobs:
        jobs_list, jobs_content = newsletter_jobs_list(request, jobs_days, simplified)
    else:
        jobs_list = []
        jobs_content = []

    pages_content = ""
    pages = int(request.GET.get('pages', 0))
    pages_days = request.GET.get('pages_days', 7)
    if pages:
        pages_list, pages_content = newsletter_pages_list(request, pages_days, simplified)
    else:
        pages_list = []
        pages_content = []

    directories_content = ""
    directories = int(request.GET.get('directories', 0))
    directories_days = request.GET.get('directories_days', 7)
    if directories:
        directories_list, directories_content = newsletter_directories_list(request, directories_days, simplified)
    else:
        directories_list = []
        directories_content = []

    resumes_content = ""
    resumes = int(request.GET.get('resumes', 0))
    resumes_days = request.GET.get('resumes_days', 7)
    if resumes:
        resumes_list, resumes_content = newsletter_resumes_list(request, resumes_days, simplified)
    else:
        resumes_list = []
        resumes_content = []

    try:
        events = int(request.GET.get('events', 1))
        events_type = request.GET.get('events_type')
        start_y, start_m, start_d = request.GET.get('event_start_dt', str(datetime.date.today())).split('-')
        event_start_dt = datetime.date(int(start_y), int(start_m), int(start_d))
        end_y, end_m, end_d = request.GET.get(
            'event_end_dt',
            str(datetime.date.today() + datetime.timedelta(days=90))).split('-')
        event_end_dt = datetime.date(int(end_y), int(end_m), int(end_d))
        events_list, events_content = newsletter_events_list(
            request,
            start_dt=event_start_dt,
            end_dt=event_end_dt,
            simplified=simplified)
    except ImportError:
        # NOTE(review): ImportError looks like an odd exception to guard
        # date parsing with -- confirm against the events module.
        events_list = []
        events_type = None
        # BUGFIX: events_content was not assigned in this branch, which
        # raised a NameError when building the context below.
        events_content = ""

    text = DTemplate(apply_template_media(template))
    context = RequestContext(request,
                             {
                                 'jumplink_content': jumplink_content,
                                 'login_content': login_content,
                                 "art_content": articles_content,  # legacy usage in templates
                                 "articles_content": articles_content,
                                 "articles_list": articles_list,
                                 "jobs_content": jobs_content,
                                 "jobs_list": jobs_list,
                                 "news_content": news_content,
                                 "news_list": news_list,
                                 "pages_content": pages_content,
                                 # BUGFIX: was mapped to pages_content
                                 "pages_list": pages_list,
                                 "directories_content": directories_content,
                                 "directories_list": directories_list,
                                 "resumes_content": resumes_content,
                                 "resumes_list": resumes_list,
                                 "events": events_list,  # legacy usage in templates
                                 "events_content": events_content,
                                 "events_list": events_list,
                                 "events_type": events_type
                             })
    content = text.render(context)

    if render:
        response = HttpResponse(content)
        return response
    else:
        template_name = "newsletters/content.html"
        return render_to_response(
            template_name, {
                'content': content,
                'template': template},
            context_instance=RequestContext(request))
0
Example 23
Project: GAE-Bulk-Mailer Source File: i18n.py
def javascript_catalog(request, domain='djangojs', packages=None):
    """
    Returns the selected language catalog as a javascript library.

    Receives the list of packages to check for translations in the
    packages parameter either from an infodict or as a +-delimited
    string from the request. Default is 'django.conf'.

    Additionally you can override the gettext domain for this view,
    but usually you don't want to do that, as JavaScript messages
    go to the djangojs domain. But this might be needed if you
    deliver your JavaScript source from Django templates.
    """
    if request.GET:
        if 'language' in request.GET:
            if check_for_language(request.GET['language']):
                activate(request.GET['language'])
    if packages is None:
        packages = ['django.conf']
    if isinstance(packages, six.string_types):
        packages = packages.split('+')
    # Only installed apps (plus django.conf itself) may contribute catalogs.
    packages = [p for p in packages if p == 'django.conf' or p in settings.INSTALLED_APPS]
    default_locale = to_locale(settings.LANGUAGE_CODE)
    locale = to_locale(get_language())

    t = {}
    paths = []
    en_selected = locale.startswith('en')
    en_catalog_missing = True
    # paths of requested packages
    for package in packages:
        p = importlib.import_module(package)
        path = os.path.join(os.path.dirname(upath(p.__file__)), 'locale')
        paths.append(path)
    # add the filesystem paths listed in the LOCALE_PATHS setting
    paths.extend(list(reversed(settings.LOCALE_PATHS)))
    # first load all english languages files for defaults
    for path in paths:
        try:
            catalog = gettext_module.translation(domain, path, ['en'])
            t.update(catalog._catalog)
        except IOError:
            pass
        else:
            # 'en' is the selected language and at least one of the packages
            # listed in `packages` has an 'en' catalog
            if en_selected:
                en_catalog_missing = False
    # next load the settings.LANGUAGE_CODE translations if it isn't english
    if default_locale != 'en':
        for path in paths:
            try:
                catalog = gettext_module.translation(domain, path, [default_locale])
            except IOError:
                catalog = None
            if catalog is not None:
                t.update(catalog._catalog)
    # last load the currently selected language, if it isn't identical to the default.
    if locale != default_locale:
        # If the currently selected language is English but it doesn't have a
        # translation catalog (presumably due to being the language translated
        # from) then a wrong language catalog might have been loaded in the
        # previous step. It needs to be discarded.
        if en_selected and en_catalog_missing:
            t = {}
        else:
            locale_t = {}
            for path in paths:
                try:
                    catalog = gettext_module.translation(domain, path, [locale])
                except IOError:
                    catalog = None
                if catalog is not None:
                    locale_t.update(catalog._catalog)
            if locale_t:
                t = locale_t
    src = [LibHead]
    plural = None
    # The empty-string key in a gettext catalog holds the PO headers,
    # including the Plural-Forms expression.
    if '' in t:
        for l in t[''].split('\n'):
            if l.startswith('Plural-Forms:'):
                plural = l.split(':',1)[1].strip()
    if plural is not None:
        # this should actually be a compiled function of a typical plural-form:
        # Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;
        plural = [el.strip() for el in plural.split(';') if el.strip().startswith('plural=')][0].split('=',1)[1]
        src.append(PluralIdx % plural)
    else:
        src.append(SimplePlural)
    csrc = []
    pdict = {}
    # String keys are singular messages; tuple keys are (msgid, plural index)
    # pairs coming from ngettext entries.
    for k, v in t.items():
        if k == '':
            continue
        if isinstance(k, six.string_types):
            csrc.append("catalog['%s'] = '%s';\n" % (javascript_quote(k), javascript_quote(v)))
        elif isinstance(k, tuple):
            if k[0] not in pdict:
                pdict[k[0]] = k[1]
            else:
                pdict[k[0]] = max(k[1], pdict[k[0]])
            csrc.append("catalog['%s'][%d] = '%s';\n" % (javascript_quote(k[0]), k[1], javascript_quote(v)))
        else:
            raise TypeError(k)
    csrc.sort()
    # Pre-declare the plural arrays before the per-index assignments run.
    for k, v in pdict.items():
        src.append("catalog['%s'] = [%s];\n" % (javascript_quote(k), ','.join(["''"]*(v+1))))
    src.extend(csrc)
    src.append(LibFoot)
    src.append(InterPolate)
    src.append(LibFormatHead)
    src.append(get_formats())
    src.append(LibFormatFoot)
    src = ''.join(src)
    return http.HttpResponse(src, 'text/javascript')
0
Example 24
Project: tendenci Source File: views.py
def group_all_export(request, group_slug):
    """
    Export all group members and subscribers for a specific group as an
    Excel (.xls) attachment.

    :param group_slug: slug of the Group to export
    :raises Http403: when the user may not change (and therefore export)
                     the group
    """
    group = get_object_or_404(Group, slug=group_slug)

    # if they can edit it, they can export it
    if not has_perm(request.user, 'user_groups.change_group', group):
        raise Http403

    import xlwt
    from ordereddict import OrderedDict
    from django.db import connection
    from tendenci.apps.forms_builder.forms.models import FieldEntry

    # create the excel book and sheet
    book = xlwt.Workbook(encoding='utf8')
    sheet = book.add_sheet('Group Members and Subscribers')

    # initialize indexes
    row_index = {}
    col_index = {}

    # excel date styles
    default_style = xlwt.Style.default_style
    datetime_style = xlwt.easyxf(num_format_str='mm/dd/yyyy hh:mm')
    date_style = xlwt.easyxf(num_format_str='mm/dd/yyyy')

    def _style_for(val):
        # Pick the excel cell style matching the value's type; previously
        # this three-way check was duplicated at both write sites.
        if isinstance(val, datetime):
            return datetime_style
        elif isinstance(val, date):
            return date_style
        return default_style

    #---------
    # MEMBERS
    #---------
    # the key is what the column will be in the
    # excel sheet. the value is the database lookup
    # Used OrderedDict to maintain the column order
    group_mappings = OrderedDict([
        ('user_id', 'au.id'),
        ('first_name', 'au.first_name'),
        ('last_name', 'au.last_name'),
        ('email', 'au.email'),
        ('receives email', 'pp.direct_mail'),
        ('company', 'pp.company'),
        ('address', 'pp.address'),
        ('address2', 'pp.address2'),
        ('city', 'pp.city'),
        ('state', 'pp.state'),
        ('zipcode', 'pp.zipcode'),
        ('country', 'pp.country'),
        ('phone', 'pp.phone'),
        ('is_active', 'au.is_active'),
        ('date', 'gm.create_dt'),
    ])
    group_lookups = ','.join(group_mappings.values())

    # Use custom sql to fetch the rows because we need to
    # populate the user profiles information and you
    # cannot do that with django's ORM without one
    # query per user profile. The column list is built from the
    # fixed mapping above and group.pk is passed as a bind
    # parameter, so no request data reaches the SQL string.
    cursor = connection.cursor()
    sql = "SELECT %s FROM user_groups_groupmembership gm \
        INNER JOIN auth_user au ON (au.id = gm.member_id) \
        LEFT OUTER JOIN profiles_profile pp \
        on (pp.user_id = gm.member_id) WHERE group_id = %%s;"
    sql = sql % group_lookups
    cursor.execute(sql, [group.pk])
    values_list = list(cursor.fetchall())

    # index the group key mappings and insert them into the sheet.
    for key in group_mappings.keys():
        if key not in col_index:
            col = len(col_index.keys())
            col_index[key] = col
            sheet.write(0, col, key, style=default_style)

    if values_list:
        # Write the data enumerated to the excel sheet
        for row, row_data in enumerate(values_list):
            # assign the row if it is not yet available (hoisted out of
            # the column loop: once per row instead of once per cell)
            if row not in row_index:
                row_index[row] = row + 1
            for col, val in enumerate(row_data):
                sheet.write(row + 1, col, val, style=_style_for(val))

    #-------------
    # Subscribers
    #-------------
    entries = FieldEntry.objects.filter(entry__subscriptions__group=group).distinct()
    for entry in entries:
        val = entry.value
        field = entry.field.label.lower().replace(" ", "_")
        subscriber_key = "subscriber %s" % str(entry.entry.pk)
        if subscriber_key in row_index:
            # get the subscriber's row number
            row = row_index[subscriber_key]
        else:
            # assign the row if it is not yet available
            row = len(row_index.keys()) + 1
            row_index[subscriber_key] = row
        if field in col_index:
            # get the entry's col number
            col = col_index[field]
        else:
            # assign the col if it is not yet available
            # and label the new column
            col = len(col_index.keys())
            col_index[field] = col
            sheet.write(0, col, field, style=default_style)
        sheet.write(row, col, val, style=_style_for(val))

    response = HttpResponse(content_type='application/vnd.ms-excel')
    response['Content-Disposition'] = 'attachment; filename=group_%s_all_export.xls' % group.pk
    book.save(response)
    return response
0
Example 25
Project: django-admin-timeline Source File: views.py
@csrf_exempt
@never_cache
@staff_member_required
def log(request, template_name=TEMPLATE_NAME, \
        template_name_ajax=TEMPLATE_NAME_AJAX):
    """
    Render admin log entries. Serves both non-AJAX and AJAX driven requests.

    Since we have a breakdown of entries per day per entry and we have an AJAX
    driven infinite scroll and we want to avoid having duplicated date headers,
    we always pass a variable named "last_date" when making another request
    to our main AJAX-driven view. So... this is our case scenario:

    Initial timeline rendered as normal HTML (non AJAX request) (from a list
    of log entries). We send the date of the last element as "last_date" to
    the context too, which will be used as an initial value for a global
    JavaScript variable. Later on that date will be used to send it to the
    AJAX driven view and used in rendering ("render_to_string" method). After
    we have rendered the HTML to send back, we get the date of the last
    element and send it along with the rendered HTML to our view in a JSON
    response. When receiving the JSON response, we update the above mentioned
    global JavaScript variable with the value given.

    :param request: django.http.HttpRequest
    :param template_name: str - template used for the non-AJAX (full page)
        response.
    :param template_name_ajax: str - template used for the AJAX (fragment)
        response.
    :return: django.http.HttpResponse - full HTML page, or a JSON object
        {"html": ..., "last_date": ..., "success": 0|1} for AJAX requests.

    This view accepts the following POST variables (all optional).

    :param page: int - Page number to get.
    :param user_id: int - If set, used to filter the user by.
    :param last_date: str - Example value "2012-05-24".
    :param start_date: str - If set, used as a start date to filter the
        actions with. Example value "2012-05-24".
    :param end_date: str - If set, used as an end date to filter the actions
        with. Example value "2012-05-24".

    NOTE: If it gets too complicated with filtering, we need to have forms to
    validate and process the POST data.
    """
    def _get_date_from_string(s):
        """
        Get a date from a "YYYY-MM-DD" string.

        :param s: str - date in string format
        :return: datetime.date on success; empty string (falsy) on any
            parse failure or missing value, so the filters below become
            no-ops.
        """
        try:
            return datetime.date(*map(lambda x: int(x), s.split("-")))
        except Exception as e:
            return ""
    # Requested page number; any bad/missing input falls back to page 1.
    try:
        page = int(request.POST.get('page', 1))
        if page < 1:
            page = 1
    except Exception as e:
        page = 1
    users = []
    content_types = []
    filter_form = None
    if 'POST' == request.method:
        # jQuery posts array parameters with a "[]" suffix; normalise the
        # keys before handing them to the filter form.
        post = dict(request.POST)
        if 'users[]' in post:
            post['users'] = post.pop('users[]')
        if 'content_types[]' in post:
            post['content_types'] = post.pop('content_types[]')
        filter_form = FilterForm(post)
        if filter_form.is_valid():
            users = filter_form.cleaned_data['users']
            content_types = filter_form.cleaned_data['content_types']
        else:
            pass # Anything to do here?
    else:
        filter_form = FilterForm()
    # Some kind of a pagination
    start = (page - 1) * NUMBER_OF_ENTRIES_PER_PAGE
    end = page * NUMBER_OF_ENTRIES_PER_PAGE
    # Getting admin log entries taking page number into consideration.
    log_entries = LogEntry.objects.all().select_related('content_type', 'user')
    start_date = _get_date_from_string(request.POST.get('start_date'))
    end_date = _get_date_from_string(request.POST.get('end_date'))
    if start_date:
        log_entries = log_entries.filter(action_time__gte=start_date) # TODO
    if end_date:
        log_entries = log_entries.filter(action_time__lte=end_date) # TODO
    # If users given, filtering by users
    if users:
        log_entries = log_entries.filter(user__id__in=users)
    # If content types given, filtering by content types
    if content_types:
        log_entries = log_entries.filter(content_type__id__in=content_types)
    # Applying limits / freezing the queryset
    log_entries = log_entries[start:end]
    if log_entries:
        # Date of the last entry on this page; echoed back so the client
        # can suppress a duplicated day heading on the next scroll request.
        last_date = date_format(
            log_entries[len(log_entries) - 1].action_time, "Y-m-d"
        )
    else:
        last_date = request.POST.get('last_date', None)
    # Using different template for AJAX driven requests
    if request.is_ajax():
        # Context to render the AJAX driven HTML with
        context = {
            'admin_log': log_entries,
            'number_of_entries_per_page': NUMBER_OF_ENTRIES_PER_PAGE,
            'page': page,
            'last_date': request.POST.get('last_date', None),
            'SINGLE_LOG_ENTRY_DATE_FORMAT': SINGLE_LOG_ENTRY_DATE_FORMAT,
            'LOG_ENTRIES_DAY_HEADINGS_DATE_FORMAT': \
                LOG_ENTRIES_DAY_HEADINGS_DATE_FORMAT
        }
        # Rendering HTML for an AJAX driven request
        html = render_to_string(
            template_name_ajax,
            context,
            context_instance=RequestContext(request)
        )
        # Context to send back to user in a JSON response
        context = {
            'html': html,
            'last_date': last_date,
            'success': 1 if len(log_entries) else 0
        }
        return HttpResponse(json.dumps(context))
    # Context for a non-AJAX request
    context = {
        'admin_log': log_entries,
        'number_of_entries_per_page': NUMBER_OF_ENTRIES_PER_PAGE,
        'page': page,
        'last_date': last_date,
        'start_date': date_format(start_date, "Y-m-d") if start_date else "",
        'end_date': date_format(end_date, "Y-m-d") if end_date else "",
        'users': [int(u) for u in users],
        'content_types': [int(ct) for ct in content_types],
        'filter_form': filter_form,
        'SINGLE_LOG_ENTRY_DATE_FORMAT': SINGLE_LOG_ENTRY_DATE_FORMAT,
        'LOG_ENTRIES_DAY_HEADINGS_DATE_FORMAT': \
            LOG_ENTRIES_DAY_HEADINGS_DATE_FORMAT,
        'title': _("Timeline") # For template breadcrumbs, etc.
    }
    return render_to_response(
        template_name, context, context_instance=RequestContext(request)
    )
0
Example 26
Project: pyas2 Source File: views.py
@csrf_exempt
def as2receive(request, *args, **kwargs):
    """
    Function receives AS2 requests from partners.
    Checks whether its an AS2 message or an MDN and acts accordingly.

    :param request: django.http.HttpRequest - the raw POST body holds the
        AS2/MDN MIME payload; AS2 headers arrive via request.META.
    :return: django.http.HttpResponse - an MDN response, an acknowledgement
        string, or an HttpResponseServerError on fatal errors. GET returns a
        usage hint; OPTIONS advertises the allowed methods.
    """
    if request.method == 'POST':
        # Process the posted AS2 message
        request_body = request.read()
        # Create separate raw_payload with only message-id and content type as M2Crypto's signature
        # verification method does not like too many header
        raw_payload = '%s: %s\n' % ('message-id', request.META['HTTP_MESSAGE_ID'])
        raw_payload += '%s: %s\n\n' % ('content-type', request.META['CONTENT_TYPE'])
        raw_payload += request_body
        # Extract all the relevant headers from the http request
        as2headers = ''
        for key in request.META:
            if key.startswith('HTTP') or key.startswith('CONTENT'):
                as2headers += '%s: %s\n' % (key.replace("HTTP_", "").replace("_", "-").lower(), request.META[key])
        pyas2init.logger.debug('Recevied an HTTP POST from %s with payload :\n%s' %
                               (request.META['REMOTE_ADDR'], as2headers + '\n' + request_body))
        try:
            pyas2init.logger.debug('Check payload to see if its an AS2 Message or ASYNC MDN.')
            # Load the request header and body as a MIME Email Message
            payload = email.message_from_string(as2headers + '\n' + request_body)
            # Get the message sender and receiver AS2 IDs
            message_org = as2utils.unescape_as2name(payload.get('as2-to'))
            message_partner = as2utils.unescape_as2name(payload.get('as2-from'))
            message = None
            # Check if this is an MDN message
            mdn_message = None
            if payload.get_content_type() == 'multipart/report':
                mdn_message = payload
            elif payload.get_content_type() == 'multipart/signed':
                # A signed MDN wraps the report part; walk the tree to find it.
                for part in payload.walk():
                    if part.get_content_type() == 'multipart/report':
                        mdn_message = part
            # If this is an MDN, get the message ID and check if it exists
            if mdn_message:
                msg_id = None
                for part in mdn_message.walk():
                    if part.get_content_type() == 'message/disposition-notification':
                        msg_id = part.get_payload().pop().get('Original-Message-ID')
                pyas2init.logger.info('Asynchronous MDN received for AS2 message %s to organization %s '
                                      'from partner %s' % (msg_id, message_org, message_partner))
                try:
                    # Get the related organization, partner and message from the db.
                    org = get_object_or_404(models.Organization, as2_name=message_org)
                    partner = get_object_or_404(models.Partner, as2_name=message_partner)
                    message = get_object_or_404(models.Message, message_id=msg_id.strip('<>'), organization=org, partner=partner)
                    models.Log.objects.create(message=message,
                                              status='S',
                                              text=_(u'Processing asynchronous mdn received from partner'))
                    as2lib.save_mdn(message, raw_payload)
                except Http404:
                    # Send 404 response
                    pyas2init.logger.error('Unknown Asynchronous MDN AS2 message %s. '
                                           'Either the partner, org or message was not found in the system' % msg_id)
                    return HttpResponseServerError(_(u'Unknown AS2 MDN received. Will not be processed'))
                except Exception, e:
                    message.status = 'E'
                    models.Log.objects.create(message=message,
                                              status='E',
                                              text=_(u'Failed to send message, error is %s' % e))
                    # Send mail here
                    as2utils.senderrorreport(message, _(u'Failed to send message, error is %s' % e))
                finally:
                    # Save message and send response to HTTP request.
                    # NOTE(review): this "return" lives inside "finally", so it
                    # also swallows any re-raise from the handlers above.
                    if message:
                        message.save()
                    return HttpResponse(_(u'AS2 ASYNC MDN has been received'))
            else:
                try:
                    # Process the received AS2 message from partner
                    # Initialize the processing status variables
                    status, adv_status, status_message = '', '', ''
                    pyas2init.logger.info('Received an AS2 message with id %s for organization %s from partner %s' %
                                          (payload.get('message-id'), message_org, message_partner))
                    # Raise duplicate message error in case message already exists in the system
                    # TODO: Create composite key (message_id, organization, partner)
                    if models.Message.objects.filter(message_id=payload.get('message-id').strip('<>')).exists():
                        # Record the duplicate under a disambiguated id so an MDN
                        # can still be produced for it in the "finally" below.
                        message = models.Message.objects.create(
                            message_id='%s_%s' % (payload.get('message-id').strip('<>'), payload.get('date')),
                            direction='IN',
                            status='IP',
                            headers=as2headers
                        )
                        raise as2utils.As2DuplicateDocuement(_(u'An identical message has already '
                                                               u'been sent to our server'))
                    # Create a new message in the system
                    message = models.Message.objects.create(
                        message_id=payload.get('message-id').strip('<>'),
                        direction='IN',
                        status='IP',
                        headers=as2headers)
                    # Process the received payload to extract the actual message from partner
                    payload = as2lib.save_message(message, payload, raw_payload)
                    # Get the inbox folder for this partner and organization
                    output_dir = as2utils.join(pyas2init.gsettings['root_dir'],
                                               'messages',
                                               message.organization.as2_name,
                                               'inbox',
                                               message.partner.as2_name)
                    # Get the filename from the header and if not there set to message id
                    if message.partner.keep_filename and payload.get_filename():
                        filename = payload.get_filename()
                    else:
                        filename = '%s.msg' % message.message_id
                    # Save the message content to the store and inbox
                    content = payload.get_payload(decode=True)
                    full_filename = as2utils.storefile(output_dir, filename, content, False)
                    store_filename = as2utils.storefile(pyas2init.gsettings['payload_receive_store'],
                                                        message.message_id,
                                                        content,
                                                        True)
                    models.Log.objects.create(message=message,
                                              status='S',
                                              text=_(u'Message has been saved successfully to %s' % full_filename))
                    message.payload = models.Payload.objects.create(name=filename,
                                                                    file=store_filename,
                                                                    content_type=payload.get_content_type())
                    # Set processing status and run the post receive command.
                    status = 'success'
                    as2lib.run_post_receive(message, full_filename)
                    message.save()
                # Catch each of the possible exceptions while processing an as2 message
                except as2utils.As2DuplicateDocuement, e:
                    status = 'warning'
                    adv_status = 'duplicate-docuement'
                    status_message = _(u'An error occurred during the AS2 message processing: %s' % e)
                except as2utils.As2PartnerNotFound, e:
                    status = 'error'
                    adv_status = 'unknown-trading-partner'
                    status_message = _(u'An error occurred during the AS2 message processing: %s' % e)
                except as2utils.As2InsufficientSecurity, e:
                    status = 'error'
                    adv_status = 'insufficient-message-security'
                    status_message = _(u'An error occurred during the AS2 message processing: %s' % e)
                except as2utils.As2DecryptionFailed, e:
                    # NOTE(review): status/adv_status look swapped here compared
                    # with every other handler ('error' should presumably be the
                    # status and 'decryption-failed' the adv_status) — confirm.
                    status = 'decryption-failed'
                    adv_status = 'error'
                    status_message = _(u'An error occurred during the AS2 message processing: %s' % e)
                except as2utils.As2DecompressionFailed, e:
                    status = 'error'
                    adv_status = 'decompression-failed'
                    status_message = _(u'An error occurred during the AS2 message processing: %s' % e)
                except as2utils.As2InvalidSignature, e:
                    status = 'error'
                    adv_status = 'integrity-check-failed'
                    status_message = _(u'An error occurred during the AS2 message processing: %s' % e)
                except Exception, e:
                    txt = traceback.format_exc(None).decode('utf-8', 'ignore')
                    pyas2init.logger.error(_(u'Unexpected error while processing message %(msg)s, '
                                             u'error:\n%(txt)s'), {'txt': txt, 'msg': message.message_id})
                    status = 'error'
                    adv_status = 'unexpected-processing-error'
                    status_message = _(u'An error occurred during the AS2 message processing: %s' % e)
                finally:
                    # Build the mdn for the message based on processing status
                    mdn_body, mdn_message = as2lib.build_mdn(message,
                                                             status,
                                                             adv_status=adv_status,
                                                             status_message=status_message)
                    # Create the mdn response body and return the MDN to the http request
                    if mdn_body:
                        mdn_response = HttpResponse(mdn_body, content_type=mdn_message.get_content_type())
                        for key, value in mdn_message.items():
                            mdn_response[key] = value
                        return mdn_response
                    else:
                        return HttpResponse(_(u'AS2 message has been received'))
        # Catch all exception in case of any kind of error in the system.
        except Exception:
            txt = traceback.format_exc(None).decode('utf-8', 'ignore')
            report_txt = _(u'Fatal error while processing message %(msg)s, '
                           u'error:\n%(txt)s') % {'txt': txt, 'msg': request.META.get('HTTP_MESSAGE_ID').strip('<>')}
            pyas2init.logger.error(report_txt)
            return HttpResponseServerError(report_txt)
            # Send mail here
            # mail_managers(_(u'[pyAS2 Error Report] Fatal
            # error%(time)s')%{'time':request.META.get('HTTP_DATE')}, reporttxt)
    elif request.method == 'GET':
        return HttpResponse(_('To submit an AS2 message, you must POST the message to this URL '))
    elif request.method == 'OPTIONS':
        response = HttpResponse()
        response['allow'] = ','.join(['POST', 'GET'])
        return response
0
Example 27
Project: django-cropduster Source File: __init__.py
@csrf_exempt
@login_required
def crop(request):
    """
    Apply the crops submitted via a CropForm ('crop' prefix) and a formset of
    ThumbForms ('thumbs' prefix), regenerate the affected thumbnails, and
    return the updated crop/thumb state as JSON.

    :param request: django.http.HttpRequest - must be a POST; GET is rejected.
    :return: django.http.HttpResponse - JSON object with keys "crop",
        "thumbs" and "initial", or a json_error(...) response on validation
        or resize failure.
    """
    if request.method == "GET":
        return json_error(request, 'crop', action="cropping image",
                          errors=["Form submission invalid"])
    crop_form = CropForm(request.POST, request.FILES, prefix='crop')
    if not crop_form.is_valid():
        return json_error(request, 'crop', action='submitting form', forms=[crop_form],
                          log=True, exc_info=full_exc_info())
    # Deep-copy so mutations below don't touch the form's cleaned_data.
    crop_data = copy.deepcopy(crop_form.cleaned_data)
    db_image = Image(image=crop_data['orig_image'])
    try:
        pil_image = PIL.Image.open(db_image.image.path)
    except IOError:
        # Original image missing or unreadable; the best-fit logic below
        # receives None and degrades gracefully.
        pil_image = None
    FormSet = modelformset_factory(Thumb, form=ThumbForm, formset=ThumbFormSet)
    thumb_formset = FormSet(request.POST, request.FILES, prefix='thumbs')
    if not thumb_formset.is_valid():
        return json_error(request, 'crop', action='submitting form', formsets=[thumb_formset],
                          log=True, exc_info=full_exc_info())
    cropped_thumbs = thumb_formset.save(commit=False)
    # Form-only fields that have no counterpart on the Thumb model.
    non_model_fields = set(ThumbForm.declared_fields) - set([f.name for f in Thumb._meta.fields])
    # The fields we will pull from when populating the ThumbForm initial data
    json_thumb_fields = ['id', 'name', 'width', 'height']
    thumbs_with_crops = [t for t in cropped_thumbs if t.crop_w and t.crop_h]
    thumbs_data = [f.cleaned_data for f in thumb_formset]
    standalone_mode = crop_data['standalone']
    for i, (thumb, thumb_form) in enumerate(zip(cropped_thumbs, thumb_formset)):
        changed_fields = set(thumb_form.changed_data) - non_model_fields
        thumb_form._changed_data = list(changed_fields)
        thumb_data = thumbs_data[i]
        size = thumb_data['size']
        if changed_fields & set(['crop_x', 'crop_y', 'crop_w', 'crop_h']):
            # Clear existing primary key to force new thumb creation
            thumb.pk = None
            # Clamp dimensions to the crop box, ignoring unset (falsy) values.
            thumb.width = min(filter(None, [thumb.width, thumb.crop_w]))
            thumb.height = min(filter(None, [thumb.height, thumb.crop_h]))
            try:
                new_thumbs = db_image.save_size(size, thumb, tmp=True, standalone=standalone_mode)
            except CropDusterResizeException as e:
                return json_error(request, 'crop',
                                  action="saving size", errors=[force_unicode(e)])
            if not new_thumbs:
                continue
            if standalone_mode:
                # save_size returns a single Thumb in standalone mode;
                # normalise to the {name: thumb} dict shape used below.
                thumb = new_thumbs
                new_thumbs = {thumb.name: thumb}
            cropped_thumbs[i] = thumb = new_thumbs.get(thumb.name, thumb)
            update_props = ['crop_x', 'crop_y', 'crop_w', 'crop_h', 'width', 'height', 'id', 'name']
            for prop in update_props:
                thumbs_data[i][prop] = getattr(thumb, prop)
            thumbs_data[i].update({
                'changed': True,
                'url': db_image.get_image_url(thumb.name),
            })
            for name, new_thumb in six.iteritems(new_thumbs):
                thumb_data = dict([(k, getattr(new_thumb, k)) for k in json_thumb_fields])
                crop_data['thumbs'].update({name: thumb_data})
                if new_thumb.reference_thumb_id:
                    continue
                thumbs_data[i]['thumbs'].update({name: thumb_data})
        elif thumb.pk and thumb.name and thumb.crop_w and thumb.crop_h:
            # Unchanged existing thumb: make sure a tmp copy exists so later
            # steps can work against it.
            thumb_path = db_image.get_image_path(thumb.name, tmp=False)
            tmp_thumb_path = db_image.get_image_path(thumb.name, tmp=True)
            if os.path.exists(thumb_path):
                if not thumb_form.cleaned_data.get('changed') or not os.path.exists(tmp_thumb_path):
                    shutil.copy(thumb_path, tmp_thumb_path)
        if not thumb.pk and not thumb.crop_w and not thumb.crop_h:
            # Brand new thumb without a crop: derive a best-fit crop from the
            # first thumb that has one, if any.
            if not len(thumbs_with_crops):
                continue
            best_fit = thumb_form.cleaned_data['size'].fit_to_crop(
                thumbs_with_crops[0], original_image=pil_image)
            if best_fit:
                thumbs_data[i].update({
                    'crop_x': best_fit.box.x1,
                    'crop_y': best_fit.box.y1,
                    'crop_w': best_fit.box.w,
                    'crop_h': best_fit.box.h,
                    'changed': True,
                    'id': None,
                })
    # Model instances are not JSON serializable; collapse to primary keys.
    for thumb_data in thumbs_data:
        if isinstance(thumb_data['id'], Thumb):
            thumb_data['id'] = thumb_data['id'].pk
    return HttpResponse(json.dumps({
        'crop': crop_data,
        'thumbs': thumbs_data,
        'initial': True,
    }), content_type='application/json')
0
Example 28
Project: coursys Source File: views.py
@requires_role(["TAAD", "GRAD"])
def contracts_csv(request, unit_slug, semester):
    """
    Export every signed TA contract of a hiring semester as a payroll CSV.

    :param request: django.http.HttpRequest - request.units restricts which
        hiring semesters are visible.
    :param unit_slug: str - label of the unit the contracts belong to.
    :param semester: str - name of the semester to export.
    :return: django.http.HttpResponse - text/csv attachment named
        "<semester>.csv", one header row plus one row per signed contract.
    """
    hiring_semester = get_object_or_404(HiringSemester,
                                        semester__name=semester,
                                        unit__in=request.units,
                                        unit__label=unit_slug)
    contracts = TAContract.objects.signed(hiring_semester)
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'inline; filename="%s.csv"' % (hiring_semester.semester.name)
    writer = csv.writer(response)
    writer.writerow(['Batch ID', 'Term ID', 'Contract Signed',
                     'Benefits Indicator', 'EmplID', 'SIN',
                     'Last Name', 'First Name 1', 'First Name 2',
                     'Payroll Start Date', 'Payroll End Date',
                     'Action', 'Action Reason', 'Position Number',
                     'Job Code', 'Full_Part time', 'Pay Group',
                     'Employee Class', 'Category', 'Fund',
                     'Dept ID (cost center)', 'Project', 'Account',
                     'Prep Units', 'Base Units', 'Appt Comp Freq',
                     'Semester Base Salary Rate',
                     'Biweekly Base Salary Pay Rate',
                     'Hourly Rate', 'Standard Hours', 'Scholarship Rate Code',
                     'Semester Scholarship Salary Pay Rate',
                     'Biweekly Scholarship Salary Pay Rate', 'Lump Sum Amount',
                     'Lump Sum Hours', 'Scholarship Lump Sum', 'Course(s)'])
    # One batch id per export, built from the unit, today's date and a
    # per-semester export sequence number.
    seq = hiring_semester.next_export_seq()
    batchid = '%s_%s_%02i' % (hiring_semester.unit.label,
                              datetime.date.today().strftime("%Y%m%d"), seq)
    for contract in contracts:
        # Prep units are whatever exceeds the base units; blank when zero.
        prep_units = contract.total_bu - contract.bu
        if prep_units == 0:
            prep_units = ''
        schol_rate = 'TSCH' if contract.scholarship_per_bu > 0 else ''
        # Build a string of all course offerings tied to this contract.
        course_list_string = ', '.join(
            [unicode.encode(ta_course.course.name()) for ta_course in contract.course.all()])
        writer.writerow([
            batchid,                                             # Batch ID
            hiring_semester.semester.name,                       # Term ID
            'Y',                                                 # Contract Signed
            'Y',                                                 # Benefits Indicator
            contract.person.emplid,                              # EmplID
            contract.sin,                                        # SIN
            contract.person.last_name,                           # Last Name
            contract.person.first_name,                          # First Name 1
            contract.person.middle_name,                         # First Name 2
            contract.pay_start.strftime("%Y%m%d"),               # Payroll Start Date
            contract.pay_end.strftime("%Y%m%d"),                 # Payroll End Date
            'REH',                                               # Action
            'REH',                                               # Action Reason
            "%08i" % contract.category.account.position_number,  # Position Number
            '',                                                  # Job Code
            '',                                                  # Full_Part time
            'TSU',                                               # Pay Group
            '',                                                  # Employee Class
            contract.category.code,                              # Category
            11,                                                  # Fund
            hiring_semester.unit.deptid(),                       # Dept ID (cost center)
            '',                                                  # Project
            contract.category.account.account_number,            # Account
            prep_units,                                          # Prep Units
            contract.bu,                                         # Base Units
            'T',                                                 # Appt Comp Freq
            "%2f" % (contract.total_pay,),                       # Semester Base Salary Rate
            '',                                                  # Biweekly Base Salary Pay Rate
            '',                                                  # Hourly Rate
            '',                                                  # Standard Hours
            schol_rate,                                          # Scholarship Rate Code
            contract.scholarship_pay,                            # Semester Scholarship Salary Pay Rate
            '',                                                  # Biweekly Scholarship Salary Pay Rate
            '',                                                  # Lump Sum Amount
            '',                                                  # Lump Sum Hours
            '',                                                  # Scholarship Lump Sum
            course_list_string,                                  # Course(s)
        ])
    return response
0
Example 29
Project: django-easyextjs4 Source File: __init__.py
@staticmethod
def Request(pRequest, pRootProject = None, pRootUrl = None, pIndex = 'index.html', pAlias = None):
    """
    Single dispatch point for all incoming HTTP requests.

    Routes the request to one of four outcomes:
      1. "<name>.js" urls registered in Ext.__URLSAPI: generate the
         Ext.Direct API wrapper (javascript, or JSON when "format=json").
      2. Urls registered in Ext.__URLSRPC: execute the posted RPC batch.
      3. Urls registered in Ext.__URLSEVT: execute a poll (event) request.
      4. Anything else: serve a static file from the project directory.

    :param pRequest: django.http.HttpRequest
    :param pRootProject: str - filesystem root for static files; defaults to
        the current working directory.
    :param pRootUrl: str - url prefix stripped from the request path.
    :param pIndex: str - file served when the path is empty.
    :param pAlias: unused in this implementation.
    :return: django.http.HttpResponse - defaults to HTTP 400 when no branch
        produced a response.
    """
    lRet = HttpResponse(status = 400, content = '<h1>HTTP 400 - Bad Request</h1>The request cannot be fulfilled due to bad syntax.')
    # Remove http://<host name>:<port>/ from pRootUrl
    pRootUrl = urlparse(pRootUrl).path
    # Validate the url: only a conservative character set is accepted.
    lPath = urlparse(pRequest.path).path
    lMatch = re.match('^/[0-9a-zA-Z\.\/\-\_]*$', lPath)
    if lMatch is None:
        raise ExtJSError('You have some invalid characters on the Url: "%s"' % pRootUrl)
    if pRootUrl is not None:
        # If the root for the url is specify check if the Url begin with this path
        if lPath.find(pRootUrl) != 0:
            raise ExtJSError('Invalid root for the Url: "%s"' % pRootUrl)
        # Remove url root from the path
        lPath = lPath[len(pRootUrl):]
    else:
        # If url root is not specify doesn't validate it
        pRootUrl = ''
    # Detect if the URL it's to return javascript wrapper
    lUrlApis = re.search('^(\w*\.js)$', lPath)
    if lUrlApis is not None:
        lUrlApi = lUrlApis.group(1)
        if lUrlApi in Ext.__URLSAPI:
            # URL found => Generate javascript wrapper
            lRemoteAPI = dict()
            for lClass in Ext.__URLSAPI[lUrlApi]:
                lExt = lClass.__ExtJS
                if lExt.Url not in lRemoteAPI:
                    # Collect all class with the same Url
                    lRemoteAPI[lExt.Url] = dict()
                lCurrent = lRemoteAPI[lExt.Url]
                if 'format' in pRequest.REQUEST and pRequest.REQUEST['format'] == 'json':
                    # 'descriptor' is need it for Sencha Architect to recognize your API
                    lCurrent['descriptor'] = lClass.__name__ + '.REMOTING_API'
                    if lExt.NameSpace is not None:
                        lCurrent['descriptor'] = lExt.NameSpace + '.' + lCurrent['descriptor']
                lCurrent['url'] = lExt.Url
                lCurrent['type'] = 'remoting'
                if lExt.Id is not None:
                    lCurrent['id'] = lExt.Id
                if lExt.NameSpace is not None:
                    lCurrent['namespace'] = lExt.NameSpace
                lCurrent['actions'] = dict()
                lAction = lCurrent['actions']
                if len(lExt.StaticMethods) > 0:
                    # Define a class as an Action with a list of functions
                    lRemoteMethods = list()
                    for lMethod in lExt.StaticMethods:
                        lMethodInfo = lExt.StaticMethods[lMethod]
                        if not lMethodInfo.NameParams:
                            lMethodExt = dict(name = lMethod, len = len(lMethodInfo.Args))
                        else:
                            # Type not supported with python 2.7 or lower.
                            if sys.version_info < (3, 0):
                                lMethodExt = dict(name = lMethod, params = lMethodInfo.Args)
                            else:
                                if not lMethodInfo.TypeParams:
                                    lMethodExt = dict(name = lMethod, params = lMethodInfo.Args)
                                else:
                                    # TODO: support this feature for python 3.x
                                    # Must return something like this :
                                    # "params": [{
                                    #    "name": "path",
                                    #    "type": "string",
                                    #    "pos": 0
                                    # },
                                    raise ExtJSError('Type for parameters not supported yet')
                        lRemoteMethods.append(lMethodExt)
                    # Each class is define as an 'Action'
                    lAction[lClass.__name__] = lRemoteMethods
                for lKey in lExt.StaticEvents:
                    # Each event is define as a Provider for ExtJS. Even if it share the same namespace.
                    lEvent = lExt.StaticEvents[lKey]
                    lRemote = dict()
                    lRemote['url'] = lEvent.Url
                    lRemote['type'] = 'polling'
                    if lEvent.Id is not None:
                        lRemote['id'] = lEvent.Id
                    if lEvent.NameSpace is not None:
                        lRemote['namespace'] = lEvent.NameSpace
                    if lEvent.Params is not None:
                        lRemote['baseParams'] = lEvent.Params
                    if lEvent.Interval is not None:
                        lRemote['interval'] = lEvent.Interval
                    lRemoteAPI[lEvent.Url] = lRemote
            if len(lRemoteAPI) > 0:
                # NOTE(review): lClass/lExt below refer to the LAST class of the
                # loop above — presumably intentional for single-class urls; confirm.
                lJsonRemoteAPI = json.dumps(lRemoteAPI.values(),default=ExtJsonHandler)
                lNameSpace = lClass.__name__
                if lExt.NameSpace is not None:
                    lNameSpace = lExt.NameSpace + '.' + lNameSpace
                if 'format' in pRequest.REQUEST and pRequest.REQUEST['format'] == 'json':
                    # Define JSON format for Sencha Architect
                    lContent = 'Ext.require(\'Ext.direct.*\');Ext.namespace(\''+ lNameSpace +'\');'+ lNameSpace + '.REMOTING_API = ' + lJsonRemoteAPI[1:len(lJsonRemoteAPI)-1] + ';'
                else:
                    # Otherwise it's return a Javascript. Each javascript must be include under the index.html like this:
                    # <script type="text/javascript" src="api.js"></script>
                    # Automatically your API is declare on ExtJS and available on your app.js.
                    lContent = 'Ext.require(\'Ext.direct.*\');Ext.namespace(\''+ lNameSpace +'\');Ext.onReady( function() { Ext.direct.Manager.addProvider(' + lJsonRemoteAPI[1:len(lJsonRemoteAPI)-1] + ');});'
                lRet = HttpResponse(content = lContent, mimetype='application/javascript')
    else:
        # Detect if the URL it's a RPC or a Poll request
        lUrlRPCsorPolls = re.search('^(\w*)$', lPath)
        if lUrlRPCsorPolls is not None:
            lUrl = lUrlRPCsorPolls.group(1)
            if lUrl in Ext.__URLSRPC:
                # URL recognize as a RPC
                # Extract data from raw post. We can not trust pRequest.POST
                lReceiveRPCs = json.loads(pRequest.body)
                # Force to be a list of dict
                if type(lReceiveRPCs) == dict:
                    lReceiveRPCs = [lReceiveRPCs]
                # Extract URL
                lClassesForUrl = Ext.__URLSRPC[lUrl]
                # Initialize content
                lContent = list()
                for lReceiveRPC in lReceiveRPCs:
                    # Execute each RPC request
                    lRcvClass = lReceiveRPC['action']
                    lRcvMethod = lReceiveRPC['method']
                    # Create name API
                    lMethodName = lRcvClass + '.' + lRcvMethod
                    # Prepare answer
                    lAnswerRPC = dict(type = 'rpc', tid = lReceiveRPC['tid'], action = lRcvClass, method = lRcvMethod)
                    # Prepare exception
                    lExceptionData = dict(Url = lUrl, Type = 'rpc', Tid = lReceiveRPC['tid'], Name = lMethodName )
                    lException = dict(type = 'exception', data = lExceptionData, message = None)
                    if lRcvClass in lClassesForUrl:
                        # URL for RPC founded
                        lClass = lClassesForUrl[lRcvClass]
                        lExt = lClass.__ExtJS
                        if lRcvMethod in lExt.StaticMethods:
                            # Method founded
                            lMethod = lExt.StaticMethods[lRcvMethod]
                            # Name used for exception message
                            if lExt.NameSpace is not None:
                                lMethodName = lExt.NameSpace + '.' + lMethodName
                            # Add Id if it's define
                            if lExt.Id is not None:
                                lExceptionData['Id'] = lExt.Id
                            # Extract datas
                            lArgs = lReceiveRPC['data']
                            # Control and call method
                            if lArgs is None:
                                if len(lMethod.Args) != 0:
                                    lException['message'] = '%s numbers of parameters invalid' % lMethodName
                                else:
                                    try:
                                        # Call method with no parameter
                                        if lMethod.Session is None:
                                            lRetMethod = lMethod.Call()
                                        else:
                                            lRetMethod = lMethod.Call(pSession = lMethod.Session(pRequest))
                                        if lRetMethod is not None:
                                            lAnswerRPC['result'] = lRetMethod
                                    except Exception as lErr:
                                        lException['message'] = '%s: %s' % (lMethodName, str(lErr))
                            elif type(lArgs) == list:
                                if len(lArgs) > len(lMethod.Args):
                                    lException['message'] = '%s numbers of parameters invalid' % lMethodName
                                else:
                                    try:
                                        # Call method with list of parameters
                                        if lMethod.Session is None:
                                            lRetMethod = lMethod.Call(*lArgs)
                                        else:
                                            lArgs.insert(0,lMethod.Session(pRequest))
                                            lRetMethod = lMethod.Call(*lArgs)
                                        if lRetMethod is not None:
                                            lAnswerRPC['result'] = lRetMethod
                                    except Exception as lErr:
                                        lException['message'] = '%s: %s' % (lMethodName, str(lErr))
                            elif type(lArgs) == dict:
                                if not lMethod.NameParams:
                                    lException['message'] = '%s does not support named parameters' % lMethodName
                                else:
                                    if len(lArgs.keys()) > len(lMethod.Args):
                                        lException['message'] = '%s numbers of parameters invalid' % lMethodName
                                    else:
                                        lInvalidParam = list()
                                        for lParam in lArgs:
                                            if lParam not in lMethod.Args:
                                                lInvalidParam.append(lParam)
                                        if len(lInvalidParam) > 0:
                                            # NOTE(review): bug — two '%s' placeholders but a single
                                            # string argument; this line raises TypeError when reached.
                                            lException['message'] = '%s: Parameters unknown -> %s' % ",".join(lInvalidParam)
                                        else:
                                            try:
                                                # Call method with naming parameters
                                                if lMethod.Session is None:
                                                    lRetMethod = lMethod.Call(**lArgs)
                                                else:
                                                    lArgs['pSession'] = lMethod.Session(pRequest)
                                                    lRetMethod = lMethod.Call(**lArgs)
                                                if lRetMethod is not None:
                                                    lAnswerRPC['result'] = lRetMethod
                                            except Exception as lErr:
                                                lException['message'] = '%s: %s' % (lMethodName, str(lErr))
                        else:
                            lException['message'] = '%s: API not found' % lMethodName
                    else:
                        lException['message'] = '%s: API not found' % lMethodName
                    if lException['message'] is not None:
                        lContent.append(lException)
                    else:
                        lContent.append(lAnswerRPC)
                if len(lContent) > 0:
                    # A single-element batch is answered as a bare object.
                    if len(lContent) == 1:
                        lRet = HttpResponse(content = json.dumps(lContent[0],default=ExtJsonHandler), mimetype='application/json')
                    else:
                        lRet = HttpResponse(content = json.dumps(lContent,default=ExtJsonHandler), mimetype='application/json')
            elif lUrl in Ext.__URLSEVT:
                # URL Recognize as Poll request. A poll request will be catch by an Ext.StaticEvent.
                lClass = Ext.__URLSEVT[lUrl]
                lExt = lClass.__ExtJS
                lEvent = lExt.StaticEvents[lUrl]
                # Define the name of the event this will be fire on ExtJS
                if lEvent.EventName is not None:
                    # Use the one specify with @Ext.StaticEvent parameter pEventName
                    lEventName = lEvent.Name
                else:
                    # This name is build with the concatanation of the name space, classe name and name event
                    lEventName = lEvent.Name
                    if len(lEvent.ClassName) != 0:
                        lEventName = lEvent.ClassName + '.' + lEvent.Name
                    if len(lEvent.NameSpace) != 0:
                        lEventName = lEvent.NameSpace + '.' + lEventName
                # Prepare event answer
                lAnswerEvent = dict(type = 'event', name = lEventName, data = None)
                # Prepare exception
                # Data exception have the same structur as define for a method except we don't have Tid information. It set to -1.
                lExceptionData = dict(Url = lUrl, Type = 'event', Tid = -1, Name = lEventName )
                lException = dict(type = 'exception', data = lExceptionData, message = None)
                # Add Id if it's define. With the id on your javascript code you can use something like this:
                # Ext.direct.Manager.on('exception', function(e) {
                #    if (e.data.Type == 'event')
                #    {
                #       lPoll = Ext.direct.Manager.getProvider(e.data.Id);
                #       lPoll.disconnect();
                #    }
                # }
                if lEvent.Id is not None:
                    lAnswerEvent['Id'] = lEvent.Id
                    lExceptionData['Id'] = lEvent.Id
                # Extraction of parameters. For event parameters are in the POST.
                # If for a key we don't have a value than mean we received a simple list of parameters direct under the key.
                # If the key have a value that mean we have naming parameters
                lArgs = None
                for lKey in pRequest.POST:
                    if pRequest.POST[lKey] == '':
                        if lArgs is None:
                            lArgs = list()
                        lArgs.extend(lKey.split(','))
                    else:
                        if lArgs is None:
                            lArgs = dict()
                        lArgs[lKey] = pRequest.POST[lKey]
                # Control and call event
                if lArgs is None:
                    if len(lEvent.Args) != 0:
                        lException['message'] = '%s numbers of parameters invalid' % lEventName
                    else:
                        try:
                            # Call event with no parameter
                            if lEvent.Session is None:
                                lRetEvt = lEvent.Call()
                            else:
                                lRetEvt = lEvent.Call(pSession = lEvent.Session(pRequest))
                            if lRetEvt is not None:
                                lAnswerEvent['data'] = lRetEvt
                        except Exception as lErr:
                            lException['message'] = '%s: %s' % (lEventName, str(lErr))
                elif type(lArgs) == list:
                    if len(lArgs) > len(lEvent.Args):
                        lException['message'] = '%s numbers of parameters invalid' % lEventName
                    else:
                        try:
                            # Call event with list of parameters
                            if lEvent.Session is None:
                                lRetEvt = lEvent.Call(*lArgs)
                            else:
                                lArgs.insert(0,lEvent.Session(pRequest))
                                lRetEvt = lEvent.Call(*lArgs)
                            if lRetEvt is not None:
                                lAnswerEvent['data'] = lRetEvt
                        except Exception as lErr:
                            lException['message'] = '%s: %s' % (lEventName, str(lErr))
                elif type(lArgs) == dict:
                    if len(lArgs.keys()) > len(lEvent.Args):
                        lException['message'] = '%s numbers of parameters invalid' % lEventName
                    else:
                        lInvalidParam = list()
                        for lParam in lArgs:
                            if lParam not in lEvent.Args:
                                lInvalidParam.append(lParam)
                        if len(lInvalidParam) > 0:
                            # NOTE(review): same format-string bug as the RPC branch —
                            # two '%s' placeholders, one argument; raises TypeError.
                            lException['message'] = '%s: Parameters unknown -> %s' % ",".join(lInvalidParam)
                        else:
                            try:
                                # Call event with naming parameters
                                if lEvent.Session is None:
                                    lRetEvt = lEvent.Call(**lArgs)
                                else:
                                    lArgs['pSession'] = lEvent.Session(pRequest)
                                    lRetEvt = lEvent.Call(**lArgs)
                                if lRetEvt is not None:
                                    lAnswerEvent['data'] = lRetEvt
                            except Exception as lErr:
                                lException['message'] = '%s: %s' % (lEventName, str(lErr))
                if lException['message'] is not None:
                    lContent = lException
                else:
                    lContent = lAnswerEvent
                lRet = HttpResponse(content = json.dumps(lContent,default=ExtJsonHandler), mimetype='application/json')
    if lRet.status_code != 200:
        # The URL is not to return the API, not to execute a RPC or an event. It's just to get a file
        if pRootProject is not None:
            if not os.path.exists(pRootProject):
                raise ExtJSError('Invalid root for the project: "%s"' % pRootProject)
        else:
            # if the root project is not specify get the path of the current folder
            pRootProject = os.getcwd()
        # The path is empty try to find and load index.html (or the file specify with pIndex)
        if len(lPath) == 0:
            lPath = pIndex
        # Rebuild path to valid it
        lPath = os.path.normpath("/".join([pRootProject,lPath]))
        lFileName, lFileExt = os.path.splitext(lPath)
        # Check if the path exist and if the extension is valid
        if not os.path.exists(lPath):
            raise ExtJSError('File not found: "%s"' % lPath)
        else:
            if lFileExt not in ['.html','.css','.js','.png','.jpg','.gif','.json','.xml']:
                raise ExtJSError('File extension is invalid: "%s"' % lFileExt)
            else:
                try:
                    lMime = mimetypes.types_map[lFileExt]
                except Exception as lException:
                    # mimetypes has no entry for '.json' on some platforms.
                    if isinstance(lException,KeyError) and lFileExt == '.json':
                        lMime = 'text/json'
                    else:
                        raise lException
                # TODO: Manage a chache file
                lFile = open(lPath)
                lContent = lFile.read()
                lFile.close()
                lRet = HttpResponse(content = lContent, mimetype = lMime)
    return lRet
0
Example 30
def schedule_json(request):
    """
    Return the conference schedule as JSON.

    *No authentication required.*

    URL: /<YEAR>/schedule/conference.json

    The payload is a flat JSON list::

        [ <slot>, <slot>, ..., <poster>, <poster>, ... ]

    where a slot represents a talk, tutorial, or plenary::

        {
            "kind": "talk"|"tutorial"|"plenary",
            "name": "Title of talk",
            "room": "roomname1, roomname2, ..., roomnameN",
            "start": "HH:MM:SS",  # ISO format
            "end": "HH:MM:SS",    # ISO format
            "duration": 30,       # minutes
            "authors": ["author name 1", ..., "author name N"],
            "abstract": "Lorem ipsum and so forth and so on",
            "description": "Lorem ipsum and so forth and so on",
            "conf_key": 27,
            "conf_url": "https://conference_domain/path/to/talk",
            "video_url": "https://somehost/path/to/video_of_talk",
            "slides_url": "https://somehost/path/to/slides_of_talk",
            "assets_url": "https://somehost/path/to/assets_for_talk",
            "tags": "tag1, tag2, ..., tagN"
        }

    and a poster looks like::

        {
            "kind": "poster",
            "name": "Title of poster",
            "authors": ["author name 1", ..., "author name N"],
            "abstract": "Lorem ipsum and so forth and so on",
            "description": "Lorem ipsum and so forth and so on",
            "room": "roomname1, roomname2, ..., roomnameN",
            "start": "HH:MM:SS",  # Provided but meaningless, ignore...
            "end": "HH:MM:SS",    # Provided but meaningless, ignore...
            "conf_key": 1227,
            "conf_url": "https://conference_domain/path/to/page/about/talk"
        }
    """
    slots = Slot.objects.all().order_by("start")
    data = []
    for slot in slots:
        # Only scheduled talks/tutorials/plenaries that actually have content
        # attached are exported; empty slots and other slot kinds are skipped.
        if slot.kind.label not in ["talk", "tutorial", "plenary"] or not slot.content:
            continue
        slot_data = {
            "name": slot.content.title,
            "room": ", ".join(room["name"] for room in slot.rooms.values()),
            "start": slot.start_date.isoformat(),
            "end": slot.end_date.isoformat(),
            "duration": slot.duration,
            "authors": [s.name for s in slot.content.speakers()],
            # abstract/description may be markitup-style objects; fall back to
            # the value itself when there is no `.raw` attribute.
            "abstract": getattr(slot.content.abstract, 'raw', slot.content.abstract),
            "description": getattr(slot.content.description, 'raw', slot.content.description),
            "conf_key": slot.pk,
            "conf_url": "https://%s%s" % (
                Site.objects.get_current().domain,
                reverse("schedule_presentation_detail", args=[slot.content.pk])
            ),
            "kind": slot.kind.label,
            "video_url": slot.content.video_url,
            "slides_url": slot.content.slides_url,
            "assets_url": slot.content.assets_url,
            "tags": "",
        }
        data.append(slot_data)
    for poster in Presentation.objects.filter(section__slug="posters", cancelled=False):
        poster_data = {
            "name": poster.title,
            "authors": [s.name for s in poster.speakers()],
            "description": getattr(poster.description, 'raw', poster.description),
            "abstract": getattr(poster.abstract, 'raw', poster.abstract),
            "room": "Poster Room",
            # Posters have no real time slot; hard-coded placeholder dates are
            # emitted only to keep the schema consistent for consumers.
            # (Was written as `03` — leading-zero decimal literals are invalid
            # syntax in Python 3.)
            "start": datetime.datetime(2014, 3, 17, 10).isoformat(),
            "end": datetime.datetime(2014, 3, 17, 13, 10).isoformat(),
            # Offset poster keys so they cannot collide with slot conf_keys.
            "conf_key": 1000 + poster.pk,
            "conf_url": "https://%s%s" % (
                Site.objects.get_current().domain,
                reverse("schedule_presentation_detail", args=[poster.pk])
            ),
            "kind": "poster",
        }
        data.append(poster_data)
    return HttpResponse(
        json.dumps(data, default=json_serializer),
        content_type="application/json"
    )
0
Example 31
def render_static(request, height=None, width=None, format='png',
                  background='satellite', bounds=None, center=None, render_srid=3857):
    """Render a static map image with mapnik and return it as an HttpResponse.

    GeoJSON features may be supplied in the POST body and are drawn on top of
    an optional raster base layer.  The viewport comes from ``bounds`` or
    ``center`` (URL parameters), or is computed from the posted geometry.

    Args:
        height, width: output image dimensions in pixels (validated against
            settings.MAX_IMAGE_DIMENSION).
        format: one of IMAGE_FORMATS, e.g. 'png', 'jpeg80' (quality suffix).
        background: key into settings.BASE_LAYERS, or 'none'.
        bounds: "minlon,minlat,maxlon,maxlat" in EPSG:4326.
        center: "lon,lat,zoom" (zoom handling not implemented yet, see TODO).
        render_srid: projection the map is rendered in (default web mercator).

    Returns:
        HttpResponse with image bytes, or an HTTP 4xx response on bad input.
    """
    # width and height
    width = int(width)
    height = int(height)
    if width > settings.MAX_IMAGE_DIMENSION or \
            height > settings.MAX_IMAGE_DIMENSION or \
            width <= 1 or height <= 1:
        logging.debug("Invalid size")
        return HttpResponseBadRequest("Invalid image size, both dimensions must be in range %i-%i" % (1, settings.MAX_IMAGE_DIMENSION))
    # image format: every entry in IMAGE_FORMATS is assumed to start with
    # 'png' or 'jpeg', otherwise `mimetype` would be left unbound below.
    if format not in IMAGE_FORMATS:
        logging.error("unknown image format %s" % format)
        return HttpResponseBadRequest("Unknown image format, available formats: " + ", ".join(IMAGE_FORMATS))
    if format.startswith('png'):
        mimetype = 'image/png'
    elif format.startswith('jpeg'):
        mimetype = 'image/jpeg'
    # bounds: parse and validate the comma-separated lon/lat box, then project
    # the two corners into the render SRID for mapnik.
    bounds_box = None
    if bounds:
        bounds_components = bounds.split(',')
        if len(bounds_components) != 4:
            return HttpResponseBadRequest("Invalid bounds, must be 4 , separated numbers")
        bounds_components = [float(f) for f in bounds_components]
        if not (-180 < bounds_components[0] < 180) or not (-180 < bounds_components[2] < 180):
            logging.error("x out of range %f or %f" % (bounds_components[0], bounds_components[2]))
            return HttpResponseBadRequest("x out of range %f or %f" % (bounds_components[0], bounds_components[2]))
        if not (-90 < bounds_components[1] < 90) or not (-90 < bounds_components[3] < 90):
            logging.error("y out of range %f or %f" % (bounds_components[1], bounds_components[3]))
            return HttpResponseBadRequest("y out of range %f or %f" % (bounds_components[1], bounds_components[3]))
        ll = Point(bounds_components[0], bounds_components[1], srid=4326)
        ll.transform(render_srid)
        ur = Point(bounds_components[2], bounds_components[3], srid=4326)
        ur.transform(render_srid)
        bounds_box = mapnik.Box2d(ll.x, ll.y, ur.x, ur.y)
    elif center:
        center_components = center.split(',')
        if len(center_components) != 3:
            return HttpResponseBadRequest()
        lon = float(center_components[0])
        lat = float(center_components[1])
        zoom = int(center_components[2])
        # todo calc bounds from center and zoom
    # baselayer
    if background not in settings.BASE_LAYERS and background != 'none':
        return HttpResponseNotFound("Background not found")
    # GeoJSON post data
    if request.method == "POST" and len(request.body):
        input_data = json.loads(request.body)
    else:
        input_data = None
    if not bounds and not center and not input_data:
        return HttpResponseBadRequest("Bounds, center, or post data is required.")
    # initialize map
    m = mapnik.Map(width, height)
    m.srs = '+init=epsg:' + str(render_srid)
    # add a tile source as a background
    if background != "none":
        background_file = settings.BASE_LAYERS[background]
        background_style = mapnik.Style()
        background_rule = mapnik.Rule()
        background_rule.symbols.append(mapnik.RasterSymbolizer())
        background_style.rules.append(background_rule)
        m.append_style('background style', background_style)
        tile_layer = mapnik.Layer('background')
        tile_layer.srs = '+init=epsg:' + str(render_srid)
        tile_layer.datasource = mapnik.Gdal(base=settings.BASE_LAYER_DIR, file=background_file)
        tile_layer.styles.append('background style')
        m.layers.append(tile_layer)
    # add features from geojson: accept a single Feature or a FeatureCollection
    if input_data and input_data['type'] == "Feature":
        features = [input_data]
    elif input_data and input_data['type'] == "FeatureCollection":
        if 'features' not in input_data:
            return HttpResponseBadRequest()
        features = input_data['features']
    else:
        features = []
    logging.debug("Adding %d features to map" % len(features))
    geometries = []
    point_features = []
    # fid numbers each feature so every line/polygon gets its own uniquely
    # named mapnik style and layer.
    fid = 0
    for feature in features:
        if 'geometry' not in feature:
            logging.debug("feature does not have geometry")
            return HttpResponseBadRequest("Feature does not have a geometry")
        if 'type' not in feature['geometry']:
            logging.debug("geometry does not have type")
            return HttpResponseBadRequest("Geometry does not have a type")
        fid += 1
        style_name = str(fid)
        if feature['geometry']['type'] == 'Point':
            # Points are collected and rendered later in a single layer.
            point_features.append(feature)
        elif feature['geometry']['type'] in ('LineString', 'MultiLineString'):
            if feature['geometry']['type'] == 'LineString':
                geos_feature = LineString(feature['geometry']['coordinates'])
            elif feature['geometry']['type'] == 'MultiLineString':
                rings = feature['geometry']['coordinates']
                rings = [[(c[0], c[1]) for c in r] for r in rings]
                if len(rings) == 1:
                    geos_feature = LineString(rings[0])
                else:
                    linestrings = []
                    for ring in rings:
                        try:
                            linestrings.append(LineString(ring))
                        except Exception, e:
                            # best-effort: skip malformed rings, keep the rest
                            logging.error("Error adding ring: %s", e)
                    geos_feature = MultiLineString(linestrings)
            geos_feature.srid = 4326
            geos_feature.transform(render_srid)
            geometries.append(geos_feature)
            style = mapnik.Style()
            line_rule = mapnik.Rule()
            # 'style' wins over 'properties' as the source of styling hints
            style_dict = None
            if 'style' in feature:
                style_dict = feature['style']
            elif 'properties' in feature:
                style_dict = feature['properties']
            line_rule.symbols.append(line_symbolizer(style_dict))
            style.rules.append(line_rule)
            m.append_style(style_name, style)
            # feed the geometry to mapnik through an inline CSV datasource
            wkt = geos_feature.wkt
            line_layer = mapnik.Layer(style_name + ' layer')
            line_layer.datasource = mapnik.CSV(inline='wkt\n' + '"' + wkt + '"')
            line_layer.styles.append(style_name)
            line_layer.srs = '+init=epsg:' + str(render_srid)
            m.layers.append(line_layer)
        elif feature['geometry']['type'] == 'Polygon':
            geos_feature = GEOSGeometry(json.dumps(feature['geometry']))
            geos_feature.srid = 4326
            geos_feature.transform(render_srid)
            geometries.append(geos_feature)
            style = mapnik.Style()
            rule = mapnik.Rule()
            style_dict = None
            if 'style' in feature:
                style_dict = feature['style']
            elif 'properties' in feature:
                style_dict = feature['properties']
            # polygons get both a fill and an outline symbolizer
            rule.symbols.append(polygon_symbolizer(style_dict))
            rule.symbols.append(line_symbolizer(style_dict))
            style.rules.append(rule)
            m.append_style(style_name, style)
            wkt = geos_feature.wkt
            layer = mapnik.Layer(style_name + ' layer')
            layer.datasource = mapnik.CSV(inline='wkt\n' + '"' + wkt + '"')
            layer.styles.append(style_name)
            layer.srs = '+init=epsg:' + str(render_srid)
            m.layers.append(layer)
        else:
            logging.info("Not adding unknown feature type")
    # point features are coalesced into a single layer for efficiency
    if len(point_features):
        logging.debug("Adding %i point features in 1 layer" % len(point_features))
        point_style = mapnik.Style()
        point_rule = mapnik.Rule()
        point_symbolizer = mapnik.PointSymbolizer()
        point_rule.symbols.append(point_symbolizer)
        point_style.rules.append(point_rule)
        m.append_style('point_style', point_style)
        csv = 'wkt\n'
        for feature in point_features:
            geos_feature = Point(feature['geometry']['coordinates'])
            geos_feature.srid = 4326
            geos_feature.transform(render_srid)
            geometries.append(geos_feature)
            csv += '"' + geos_feature.wkt + '"\n'
        point_layer = mapnik.Layer('point layer')
        point_layer.datasource = mapnik.CSV(inline=csv)
        point_layer.styles.append('point_style')
        point_layer.srs = '+init=epsg:' + str(render_srid)
        m.layers.append(point_layer)
    # bounds not in url, calculate from data with a 20% margin on each side
    if not bounds_box:
        geometry_collection = GeometryCollection(geometries)
        minx, miny, maxx, maxy = geometry_collection.extent
        buffer_size = .2
        x_buffer_size = ((maxx - minx) * buffer_size)
        y_buffer_size = ((maxy - miny) * buffer_size)
        if x_buffer_size == 0:  # this can happen if there is only 1 point feature
            x_buffer_size = 1000
        if y_buffer_size == 0:
            y_buffer_size = 1000
        bounds_box = mapnik.Box2d(minx - x_buffer_size, miny - y_buffer_size,
                                  maxx + x_buffer_size, maxy + y_buffer_size)
    m.zoom_to_box(bounds_box)
    # render image
    im = mapnik.Image(m.width, m.height)
    mapnik.render(m, im)
    data = im.tostring(str(format))
    # Attribution stamping requires a round-trip through PIL; the re-encode
    # with the optional quality suffix (e.g. 'jpeg80', 'png32') only happens
    # on this path — otherwise mapnik's own encoding is returned directly.
    if background in settings.BASE_LAYERS_ATTRIBUTION:
        image = Image.open(cStringIO.StringIO(data))
        if format.startswith('png'):
            image = image.convert('RGB') # workaround for Pillow palette bug
        add_attribution(image, settings.BASE_LAYERS_ATTRIBUTION[background])
        output = cStringIO.StringIO()
        match = re.match('^(jpeg|png)(\d{1,3})$', format)
        if match:
            image_format, quality = match.groups()
            quality = int(quality)
            if image_format == 'jpeg':
                image.save(output, 'jpeg', quality=quality)
            else:
                # for png the numeric suffix is a palette size, not a quality
                image = image.convert('P', palette=Image.ADAPTIVE, colors=quality)
                bits = int(log(quality, 2))
                image.save(output, 'png', bits=bits)
        else:
            image.save(output, format)
        data = output.getvalue()
        output.close()
    return HttpResponse(data, content_type=mimetype)
0
Example 32
Project: baruwa Source File: views.py
@login_required
def report(request, report_kind):
    """Display one of the built-in mail reports.

    ``report_kind`` (1-11) selects the report:
    1-8 are pie reports over sender/recipient addresses and domains, by
    message count or volume; 9 is a bar chart of spam-score distribution;
    10 is a listing of top relaying hosts; 11 is the daily message totals
    report.  AJAX requests receive JSON, otherwise a template is rendered.
    """
    report_kind = int(report_kind)
    # default template; kinds 9/10/11 override it below
    template = "reports/piereport.html"
    active_filters = []
    # kinds 1-8: one run_query per kind, differing only in the grouping
    # column ('from_address', 'from_domain', 'to_address', 'to_domain')
    # and ordering ('-num_count' = by quantity, '-total_size' = by volume).
    if report_kind == 1:
        data = run_query('from_address', {'from_address__exact': ""},
                         '-num_count', request, active_filters)
        pie_data = pack_json_data(data, 'from_address', 'num_count')
        report_title = _("Top senders by quantity")
    elif report_kind == 2:
        data = run_query('from_address', {'from_address__exact': ""},
                         '-total_size', request, active_filters)
        pie_data = pack_json_data(data, 'from_address', 'total_size')
        report_title = _("Top senders by volume")
    elif report_kind == 3:
        data = run_query('from_domain', {'from_domain__exact': ""},
                         '-num_count', request, active_filters)
        pie_data = pack_json_data(data, 'from_domain', 'num_count')
        report_title = _("Top sender domains by quantity")
    elif report_kind == 4:
        data = run_query('from_domain', {'from_domain__exact': ""},
                         '-total_size', request, active_filters)
        pie_data = pack_json_data(data, 'from_domain', 'total_size')
        report_title = _("Top sender domains by volume")
    elif report_kind == 5:
        data = run_query('to_address', {'to_address__exact': ""},
                         '-num_count', request, active_filters)
        pie_data = pack_json_data(data, 'to_address', 'num_count')
        report_title = _("Top recipients by quantity")
    elif report_kind == 6:
        data = run_query('to_address', {'to_address__exact': ""},
                         '-total_size', request, active_filters)
        pie_data = pack_json_data(data, 'to_address', 'total_size')
        report_title = _("Top recipients by volume")
    elif report_kind == 7:
        data = run_query('to_domain', {'to_domain__exact': "",
                         'to_domain__isnull': False}, '-num_count', request,
                         active_filters)
        pie_data = pack_json_data(data, 'to_domain', 'num_count')
        report_title = _("Top recipient domains by quantity")
    elif report_kind == 8:
        data = run_query('to_domain', {'to_domain__exact': "",
                         'to_domain__isnull': False}, '-total_size',
                         request, active_filters)
        pie_data = pack_json_data(data, 'to_domain', 'total_size')
        report_title = _("Top recipient domains by volume")
    elif report_kind == 9:
        # spam-score distribution, rendered as a bar chart
        from baruwa.messages.models import SpamScores
        filter_list = []
        addrs = []
        counts = []
        scores = []
        act = 3
        # non-superusers only see messages for their own addresses
        if not request.user.is_superuser:
            addrs = request.session['user_filter']['addresses']
            act = request.session['user_filter']['account_type']
        if request.session.get('filter_by', False):
            filter_list = request.session.get('filter_by')
            get_active_filters(filter_list, active_filters)
        data = SpamScores.objects.all(request.user, filter_list, addrs, act)
        for index, row in enumerate(data):
            value = index + 1
            scores.append({'value': value, 'text': str(row.score)})
            counts.append({'y': int(row.count),
                           'tooltip': 'Score ' + str(row.score) + ': ' + str(row.count)})
        if request.is_ajax():
            data = [obj.obj_to_dict() for obj in data]
        pie_data = {'scores': scores, 'count': counts}
        template = "reports/barreport.html"
        report_title = _("Spam Score distribution")
    elif report_kind == 10:
        # top relaying hosts; AJAX responses get GeoIP/hostname decoration
        data = run_hosts_query(request, active_filters)
        pie_data = pack_json_data(data, 'clientip', 'num_count')
        if request.is_ajax():
            from baruwa.messages.templatetags.messages_extras import \
                tds_geoip, tds_hostname
            for row in data:
                row['country'] = tds_geoip(row['clientip'])
                row['hostname'] = tds_hostname(row['clientip'])
        report_title = _("Top mail hosts by quantity")
        template = "reports/relays.html"
    elif report_kind == 11:
        # daily totals of mail/spam/virus counts and volume
        from baruwa.messages.models import MessageTotals
        filter_list = []
        addrs = []
        dates = []
        mail_total = []
        spam_total = []
        virus_total = []
        size_total = []
        act = 3
        if not request.user.is_superuser:
            addrs = request.session['user_filter']['addresses']
            act = request.session['user_filter']['account_type']
        if request.session.get('filter_by', False):
            filter_list = request.session.get('filter_by')
            get_active_filters(filter_list, active_filters)
        data = MessageTotals.objects.all(request.user, filter_list, addrs, act)
        for row in data:
            dates.append(str(row.date))
            mail_total.append(int(row.mail_total))
            spam_total.append(int(row.spam_total))
            virus_total.append(int(row.virus_total))
            size_total.append(int(row.size_total))
        # shape the per-day series for the charting frontend
        pie_data = {'dates': [{'value': index + 1, 'text': date}
                              for index, date in enumerate(dates)],
                    'mail': [{'y': total,
                              'tooltip': 'Mail totals on ' + dates[index] + ': ' + str(total)}
                             for index, total in enumerate(mail_total)],
                    'spam': [{'y': total,
                              'tooltip': 'Spam totals on ' + dates[index] + ': ' + str(total)}
                             for index, total in enumerate(spam_total)],
                    'virii': [{'y': total,
                               'tooltip': 'Virus totals on ' + dates[index] + ': ' + str(total)}
                              for index, total in enumerate(virus_total)],
                    'volume': size_total,
                    #'volume_labels': [{'value': total,
                    #'text': str(filesizeformat(total))} for total in size_total],
                    'mail_total': sum(mail_total),
                    'spam_total': sum(spam_total),
                    'virus_total': sum(virus_total),
                    'volume_total': sum(size_total)}
        # percentage of virus/spam over all mail; guard against empty data
        try:
            vpct = "%.1f" % ((1.0 * sum(virus_total) / sum(mail_total)) * 100)
            spct = "%.1f" % ((1.0 * sum(spam_total) / sum(mail_total)) * 100)
        except ZeroDivisionError:
            vpct = "0.0"
            spct = "0.0"
        pie_data['vpct'] = vpct
        pie_data['spct'] = spct
        #graph_totals = {}
        if request.is_ajax():
            data = [obj.obj_to_dict() for obj in data]
        report_title = _("Total messages [ After SMTP ]")
        template = "reports/listing.html"
    filter_form = FilterForm()
    if request.is_ajax():
        response = anyjson.dumps({'items': list(data), 'pie_data': pie_data})
        return HttpResponse(response,
                            content_type='application/javascript; charset=utf-8')
    else:
        # kinds 9 and 11 build pie_data as a dict the template consumes
        # directly; the others pass it pre-serialized as JSON.
        if not report_kind in [9, 11]:
            pie_data = anyjson.dumps(pie_data)
        return render_to_response(template, {'pie_data': pie_data,
                                  'top_items': data, 'report_title': report_title,
                                  'report_kind': report_kind, 'active_filters': active_filters,
                                  'form': filter_form}, context_instance=RequestContext(request))
0
Example 33
Project: transifex Source File: views.py
@login_required
def push_translation(request, project_slug, lang_code, *args, **kwargs):
"""
Client pushes an id and a translation string.
Id is considered to be of the source translation string and the string is
in the target_lang.
FIXME: Docuement in detail the form of the 'strings' POST variable.
"""
logger.debug("POST data when saving translation: %s" % request.POST)
# Permissions handling
# Project should always be available
project = get_object_or_404(Project, slug=project_slug)
team = Team.objects.get_or_none(project, lang_code)
check = ProjectPermission(request.user)
if not check.submit_translations(team or project) and not\
check.maintain(project):
return permission_denied(request)
if not request.POST:
return HttpResponseBadRequest()
data = simplejson.loads(request.raw_post_data)
strings = data["strings"]
try:
target_language = Language.objects.by_code_or_alias(lang_code)
except Language.DoesNotExist:
raise Http404
# This dictionary will hold the results of the save operation and will map
# status code for each translation pushed, to indicate the result on each
# translation push separately.
push_response_dict = {}
# Form the strings dictionary, get as Json object
# The fields are the following:
# id-> source_entity id
# translations-> translation strings (includes all plurals)
# context-> source_entity context
# occurrence-> occurrence (not yet well supported)
# Iterate through all the row data that have been sent.
for row in strings:
source_id = int(row['id'])
try:
source_string = Translation.objects.select_related(depth=1).get(
id=source_id
)
except Translation.DoesNotExist:
# TODO: Log or inform here
push_response_dict[source_id] = { 'status':400,
'message':_("Source string cannot be identified in the DB")}
# If the source_string cannot be identified in the DB then go to next
# translation pair.
continue
if not source_string.resource.accept_translations:
push_response_dict[source_id] = { 'status':400,
'message':_("The resource of this source string is not "
"accepting translations.") }
# If the translated source string is pluralized check that all the
# source language supported rules have been filled in, else return error
# and donot save the translations.
if source_string.source_entity.pluralized:
error_flag = False
for rule in target_language.get_pluralrules():
if rule in row['translations'] and row['translations'][rule] != "":
continue
else:
error_flag = True
if error_flag:
error_flag = False
# Check also if all of them are "". If yes, delete all the plurals!
for rule in target_language.get_pluralrules():
if rule in row['translations'] and row['translations'][rule] == "":
continue
else:
error_flag = True
if error_flag:
push_response_dict[source_id] = { 'status':400,
'message':(_("Cannot save unless plural translations are either "
"completely specified or entirely empty!"))}
# Skip the save as we hit on an error.
continue
try:
msgs = _save_translation(
source_string, row['translations'],
target_language, request.user
)
if not msgs:
push_response_dict[source_id] = {'status': 200}
else:
push_response_dict[source_id] = {
'status': 200, 'message': msgs[-1]
}
except LotteBadRequestError, e:
push_response_dict[source_id] = {
'status': 400, 'message': e.message
}
except Exception, e:
logger.error(
"Unexpected exception raised: %s" % e.message, exc_info=True
)
push_response_dict[source_id] = {
'status': 400, 'message': e.message
}
json_dict = simplejson.dumps(push_response_dict)
return HttpResponse(json_dict, mimetype='application/json')
0
Example 34
Project: django-axes Source File: decorators.py
def watch_login(func):
    """
    Used to decorate the django.contrib.admin.site.login method.

    Records failed POST login attempts (per user-agent/IP via
    ``get_user_attempt``) and, once ``FAILURE_LIMIT`` is exceeded and
    ``LOCK_OUT_AT_FAILURE`` is set, replaces the response with a lock-out
    message and logs the user out.
    """
    def decorated_login(request, *args, **kwargs):
        # share some useful information
        if func.__name__ != 'decorated_login':
            log.info('Calling decorated function: %s' % func)
            if args:
                log.info('args: %s' % args)
            if kwargs:
                log.info('kwargs: %s' % kwargs)
        # call the login function
        response = func(request, *args, **kwargs)
        if func.__name__ == 'decorated_login':
            # if we're dealing with this function itself, don't bother checking
            # for invalid login attempts. I suppose there's a bunch of
            # recursion going on here that used to cause one failed login
            # attempt to generate 10+ failed access attempt records (with 3
            # failed attempts each supposedly)
            return response
        if request.method == 'POST':
            failures = 0
            # see if the login was successful: a successful admin login
            # redirects (302 with a Location header)
            login_unsuccessful = (
                response and
                not response.has_header('location') and
                response.status_code != 302
            )
            attempt = get_user_attempt(request)
            if attempt:
                failures = attempt.failures_since_start
            if login_unsuccessful:
                # add a failed attempt for this user
                failures += 1
            log.info('-' * 79)
            # Create an AccessAttempt record if the login wasn't successful
            if login_unsuccessful:
                # has already attempted, update the info
                if attempt:
                    log.info('=================================')
                    log.info('Updating access attempt record...')
                    log.info('=================================')
                    attempt.get_data = '%s\n---------\n%s' % (
                        attempt.get_data,
                        query2str(request.GET.items()),
                    )
                    attempt.post_data = '%s\n---------\n%s' % (
                        attempt.post_data,
                        query2str(request.POST.items())
                    )
                    attempt.http_accept = request.META.get('HTTP_ACCEPT', '<unknown>')
                    attempt.path_info = request.META.get('PATH_INFO', '<unknown>')
                    attempt.failures_since_start = failures
                    attempt.attempt_time = datetime.datetime.now()
                    attempt.save()
                else:
                    log.info('=================================')
                    log.info('Creating access attempt record...')
                    log.info('=================================')
                    ip = request.META.get('REMOTE_ADDR', '')
                    ua = request.META.get('HTTP_USER_AGENT', '<unknown>')
                    attempt = AccessAttempt.objects.create(
                        user_agent=ua,
                        ip_address=ip,
                        get_data=query2str(request.GET.items()),
                        post_data=query2str(request.POST.items()),
                        http_accept=request.META.get('HTTP_ACCEPT', '<unknown>'),
                        path_info=request.META.get('PATH_INFO', '<unknown>'),
                        failures_since_start=failures
                    )
            # no matter what, we want to lock them out
            # if they're past the number of attempts allowed
            if failures > FAILURE_LIMIT:
                if LOCK_OUT_AT_FAILURE:
                    response = HttpResponse("Account locked: too many login attempts. "
                                            "Contact an admin to unlock your account."
                                            )
                    # We log them out in case they actually managed to enter
                    # the correct password.
                    logout(request)
                    return response
        # FIX: previously there was no final return, so every request that did
        # not hit the lock-out branch (all GETs, successful logins) got None
        # back instead of the wrapped view's response.
        return response
    return decorated_login
0
Example 35
Project: treeio Source File: rendering.py
def render_to_response(template_name, context=None, context_instance=None, response_format='html'):
    """Extended render_to_response to support different formats.

    ``response_format`` selects the output: any key of
    settings.HARDTREE_RESPONSE_FORMATS ('html' by default); a format
    containing 'pdf' renders the template to HTML, converts it with
    wkhtmltopdf and returns the PDF bytes; a format containing 'ajax' routes
    through render_to_ajax.  Returns an HttpResponse.
    """
    if context is None:
        context = {}
    if not response_format:
        response_format = 'html'
    if response_format not in settings.HARDTREE_RESPONSE_FORMATS:
        response_format = 'html'
    content_type = settings.HARDTREE_RESPONSE_FORMATS[response_format]
    if 'pdf' in response_format:
        # pick an unused random name for the output PDF...
        while True:
            hasher = hashlib.md5()
            hasher.update(str(random.random()))
            filepath = u"pdfs/" + hasher.hexdigest()
            output = settings.MEDIA_ROOT + filepath
            if not os.path.exists(output + ".pdf"):
                break
        # ...and another for the intermediate HTML source
        while True:
            hasher = hashlib.md5()
            hasher.update(str(random.random()))
            filepath = hasher.hexdigest() + ".html"
            source = getattr(settings, 'WKCWD', './') + filepath
            if not os.path.exists(source):
                break
        page_size = "A4"
        orientation = "portrait"
        rendered_string = render_to_string(
            template_name, context, context_instance, response_format)
        f = codecs.open(source, encoding='utf-8', mode='w')
        pdf_string = unicode(rendered_string)
        if context_instance and context_instance['request']:
            # make site-absolute links resolvable from the temp HTML file
            pdf_string = pdf_string.replace(
                "a href=\"/", "a href=\"http://" + RequestSite(context_instance['request']).domain + "/")
            # FIX: str.replace returns a new string; the result was previously
            # discarded, leaving this intended rewrite a no-op.
            pdf_string = pdf_string.replace("href=\"/", "href=\"")
        # strip artefacts that break wkhtmltopdf rendering
        pattern = """Content-Type: text/html|<td>\n\W*<div class="content-list-tick">\n\W.*\n.*</div></td>|<th scope="col">Select</th>"""
        pdf_string = re.sub(pattern, "", pdf_string).replace(
            '/static/', 'static/')
        f.write(pdf_string)
        f.close()
        wkpath = getattr(settings, 'WKPATH', './bin/wkhtmltopdf-i386')
        x = subprocess.Popen("%s --print-media-type --orientation %s --page-size %s %s %s" %
                             (wkpath,
                              orientation,
                              page_size,
                              source,
                              output),
                             shell=True,
                             cwd=getattr(settings, 'WKCWD', './'))
        x.wait()
        f = open(output)
        response = HttpResponse(f.read(), content_type='application/pdf')
        f.close()
        # clean up both temp files
        os.remove(output)
        os.remove(source)
        # response['Content-Disposition'] = 'attachment; filename=%s'%(pdf_name)
        return response
    if 'ajax' in response_format:
        rendered_string = render_to_ajax(
            template_name, context, context_instance)
    else:
        # mobile URLs (/m/...) get the mobile format instead of html
        if response_format == 'html' and context_instance and context_instance['request'].path[:3] == '/m/':
            context['response_format'] = response_format = 'mobile'
        if getattr(settings, 'HARDTREE_FORCE_AJAX_RENDERING', False):
            context = preprocess_context_ajax(context)
        rendered_string = render_to_string(
            template_name, context, context_instance, response_format)
    response = HttpResponse(rendered_string, content_type=content_type)
    return response
0
Example 36
Project: django-vcs-watch Source File: views.py
def object_list(request,
                cls,
                query = None,
                paginate_by = None,
                page = None,
                allow_empty = True,
                template_name = None,
                template_loader = loader,
                extra_context = None,
                context_processors = None,
                template_object_name = 'object_list',
                mimetype = None,
                map_func = lambda x: x,
                **kwargs):
    """
    Generic list of objects.
    Templates: ``<collection_name>_list.html``
    Context:
        object_list
            list of objects
        is_paginated
            are the results paginated?
        results_per_page
            number of objects per page (if paginated)
        has_next
            is there a next page?
        has_previous
            is there a prev page?
        page
            the current page
        next
            the next page
        previous
            the previous page
        pages
            number of pages, total
        hits
            number of objects, total
        last_on_page
            the result number of the last of object in the
            object_list (1-indexed)
        first_on_page
            the result number of the first object in the
            object_list (1-indexed)
        page_range:
            A list of the page numbers (1-indexed).
    """
    # FIX: `query` and `extra_context` used mutable default arguments ({}),
    # which are shared across calls; default to None and create fresh dicts.
    if query is None:
        query = {}
    if extra_context is None:
        extra_context = {}
    # both may alternatively be callables evaluated per request
    if callable(extra_context):
        extra_context = extra_context(request, **kwargs)
    if callable(query):
        query = query(request, **kwargs)
    cursor = cls.objects.find(query)
    if paginate_by:
        paginator = Paginator(cursor, paginate_by, allow_empty_first_page=allow_empty)
        if not page:
            page = request.GET.get('page', 1)
        try:
            page_number = int(page)
        except ValueError:
            if page == 'last':
                page_number = paginator.num_pages
            else:
                # Page is not 'last', nor can it be converted to an int.
                raise Http404
        try:
            page_obj = paginator.page(page_number)
        except InvalidPage:
            raise Http404
        c = RequestContext(request, {
            template_object_name: map(map_func, page_obj.object_list),
            'paginator': paginator,
            'page_obj': page_obj,
            # Legacy template context stuff. New templates should use page_obj
            # to access this instead.
            'is_paginated': page_obj.has_other_pages(),
            'results_per_page': paginator.per_page,
            'has_next': page_obj.has_next(),
            'has_previous': page_obj.has_previous(),
            'page': page_obj.number,
            'next': page_obj.next_page_number(),
            'previous': page_obj.previous_page_number(),
            'first_on_page': page_obj.start_index(),
            'last_on_page': page_obj.end_index(),
            'pages': paginator.num_pages,
            'hits': paginator.count,
            'page_range': paginator.page_range,
        }, context_processors)
    else:
        c = RequestContext(request, {
            template_object_name: map(map_func, cursor),
            'paginator': None,
            'page_obj': None,
            'is_paginated': False,
        }, context_processors)
    if not allow_empty and len(cursor) == 0:
        raise Http404
    # extra_context values may themselves be lazy callables
    for key, value in extra_context.items():
        if callable(value):
            c[key] = value()
        else:
            c[key] = value
    if not template_name:
        template_name = "%s_list.html" % cls.objects.collection_name
    t = template_loader.get_template(template_name)
    return HttpResponse(t.render(c), mimetype=mimetype)
0
Example 37
@need_basicauth
@csrf_exempt
def container(request, id):
    """REST endpoint for a single customer container.

    GET returns the container's JSON representation; POST updates the
    whitelisted fields from the JSON body (link/unlink/reboot schedule a
    reboot).  ``id`` is the external id, offset by UWSGI_IT_BASE_UID from the
    database pk.  Returns 403 for foreign containers, 409 on link conflicts.
    """
    customer = request.user.customer
    try:
        container = customer.container_set.get(pk=(int(id) - UWSGI_IT_BASE_UID))
    # FIX: was a bare `except:` (also caught KeyboardInterrupt/SystemExit);
    # narrowed to Exception while keeping the deliberate catch-all behavior.
    except Exception:
        return HttpResponseForbidden(json.dumps({'error': 'Forbidden'}), content_type="application/json")
    if request.method == 'POST':
        response = check_body(request)
        if response:
            return response
        # only these simple attributes may be set directly from the body
        allowed_keys = (
            'name', 'note', 'quota_threshold', 'jid', 'jid_secret',
            'jid_destinations', 'nofollow', 'pushover_user',
            'pushover_token', 'pushover_sound', 'alarm_freq',
            'pushbullet_token', 'slack_webhook',
            'custom_distros_storage',
        )
        j = json.loads(request.read())
        if not j:
            return HttpResponseForbidden(json.dumps({'error': 'Forbidden'}), content_type="application/json")
        for key in j:
            if key in allowed_keys:
                setattr(container, key, j[key])
        if 'ssh_keys' in j:
            container.ssh_keys_raw = '\n'.join(j['ssh_keys'])
            container.ssh_keys_mtime = datetime.datetime.now()
        if 'distro' in j:
            container.distro = Distro.objects.get(pk=j['distro'])
        if 'custom_distro' in j:
            # custom distro must live on the same server / same customer and
            # not belong to this container itself
            container.custom_distro = CustomDistro.objects.filter(pk=j['custom_distro'], container__server=container.server, container__customer=customer).exclude(container=container)[0]
        # memory/storage can only be resized by the server owner
        if 'memory' in j:
            if container.server.owner == customer:
                container.memory = int(j['memory'])
        if 'storage' in j:
            if container.server.owner == customer:
                container.storage = int(j['storage'])
        if 'tags' in j:
            new_tags = []
            for tag in j['tags']:
                try:
                    new_tags.append(Tag.objects.get(customer=customer, name=tag))
                except Exception:
                    # best-effort: unknown tags are silently ignored
                    pass
            container.tags = new_tags
        # linking and unlinking requires reboot
        if 'link' in j:
            try:
                link = ContainerLink()
                link.container = container
                link.to = Container.objects.get(pk=(int(j['link']) - UWSGI_IT_BASE_UID))
                link.full_clean()
                link.save()
                container.last_reboot = datetime.datetime.now()
            except Exception:
                response = HttpResponse(json.dumps({'error': 'Conflict'}), content_type="application/json")
                response.status_code = 409
                return response
        if 'unlink' in j:
            try:
                link = container.containerlink_set.get(to=(int(j['unlink']) - UWSGI_IT_BASE_UID))
                link.delete()
                container.last_reboot = datetime.datetime.now()
            except Exception:
                response = HttpResponse(json.dumps({'error': 'Conflict'}), content_type="application/json")
                response.status_code = 409
                return response
        if 'reboot' in j:
            container.last_reboot = datetime.datetime.now()
        container.full_clean()
        container.save()
    # JSON representation returned for both GET and (updated) POST
    c = {
        'uid': container.uid,
        'name': container.name,
        'hostname': container.hostname,
        'ip': str(container.ip),
        'memory': container.memory,
        'storage': container.storage,
        'uuid': container.uuid,
        'distro': None,
        'distro_name': None,
        'server': container.server.name,
        'server_address': container.server.address,
        'jid': container.jid,
        'jid_destinations': container.jid_destinations,
        'pushover_user': container.pushover_user,
        'pushover_token': container.pushover_token,
        'pushover_sound': container.pushover_sound,
        'pushbullet_token': container.pushbullet_token,
        'slack_webhook': container.slack_webhook,
        'alarm_freq': container.alarm_freq,
        'quota_threshold': container.quota_threshold,
        'nofollow': container.nofollow,
        'note': container.note,
        'linked_to': container.linked_to,
        'custom_distros_storage': container.custom_distros_storage,
        'custom_distro': None,
        'ssh_keys': container.ssh_keys,
        'tags': [t.name for t in container.tags.all()],
        'legion_address': [l.address for l in container.server.legion_set.all()]
    }
    if container.distro:
        c['distro'] = container.distro.pk
        c['distro_name'] = container.distro.name
    if container.custom_distro:
        c['custom_distro'] = container.custom_distro.pk
        c['custom_distro_name'] = container.custom_distro.name
    return spit_json(request, c)
0
Example 38
@vary_on_headers('Authorization')
def __call__(self, request, *args, **kwargs):
    """
    Dispatch an incoming request to the matching handler method and
    serialize the result through the negotiated emitter.

    NB: Sends a `Vary` header so we don't cache requests
    that are different (OAuth stuff in `Authorization` header.)
    """
    rm = request.method.upper()
    # Django's internal mechanism doesn't pick up
    # PUT request, so we trick it a little here.
    if rm == "PUT":
        coerce_put_post(request)
    # Authentication yields either a handler ("actor") or, when it fails,
    # a callable that builds the authentication-challenge response.
    actor, anonymous = self.authenticate(request, rm)
    if anonymous is CHALLENGE:
        return actor()
    else:
        handler = actor
    # Translate nested datastructs into `request.data` here.
    if rm in ('POST', 'PUT'):
        try:
            translate_mime(request)
        except MimerDataException:
            return rc.BAD_REQUEST
        if not hasattr(request, 'data'):
            # No mimer populated request.data; fall back to the form data.
            if rm == 'POST':
                request.data = request.POST
            else:
                request.data = request.PUT
    if not rm in handler.allowed_methods:
        return HttpResponseNotAllowed(handler.allowed_methods)
    # callmap translates the HTTP verb into the handler method name.
    meth = getattr(handler, self.callmap.get(rm), None)
    if not meth:
        raise Http404
    # Support emitter both through (?P<emitter_format>) and ?format=emitter.
    em_format = self.determine_emitter(request, *args, **kwargs)
    kwargs.pop('emitter_format', None)
    # Clean up the request object a bit, since we might
    # very well have `oauth_`-headers in there, and we
    # don't want to pass these along to the handler.
    request = self.cleanup_request(request)
    try:
        result = meth(request, *args, **kwargs)
    except Exception, e:
        # Delegate any handler failure to the configured error handler,
        # whose return value is emitted like a normal result.
        result = self.error_handler(e, request, meth)
    emitter, ct = Emitter.get(em_format)
    fields = handler.fields
    # List-style results may declare a separate field set.
    if hasattr(handler, 'list_fields') and (
            isinstance(result, list) or isinstance(result, QuerySet)):
        fields = handler.list_fields
    status_code = 200
    # If we're looking at a response object which contains non-string
    # content, then assume we should use the emitter to format that
    # content
    if isinstance(result, HttpResponse) and not result._is_string:
        status_code = result.status_code
        # Note: We can't use result.content here because that method attempts
        # to convert the content into a string which we don't want.
        # when _is_string is False _container is the raw data
        result = result._container
    srl = emitter(result, typemapper, handler, fields, anonymous)
    try:
        """
        Decide whether or not we want a generator here,
        or we just want to buffer up the entire result
        before sending it to the client. Won't matter for
        smaller datasets, but larger will have an impact.
        """
        if self.stream: stream = srl.stream_render(request)
        else: stream = srl.render(request)
        if not isinstance(stream, HttpResponse):
            resp = HttpResponse(stream, mimetype=ct, status=status_code)
        else:
            resp = stream
        resp.streaming = self.stream
        return resp
    except HttpStatusCode, e:
        # The serializer signalled a specific HTTP status; return it as-is.
        return e.response
0
Example 39
def __param(method_name, *p_args, **p_kwargs):
    """
    Build a decorator that extracts, converts and validates request
    parameters before calling the wrapped view.

    `method_name` selects where parameters are read from: 'get', 'post',
    or 'param' (GET merged with POST). Positional names in `p_args` are
    required raw parameters; keyword specs in `p_kwargs` may be given as:

    @get('param1', 'param2')
    @get(param1={'name':'parameter_name', 'type':int, 'default':0})
    @get(param1={'type':int, 'default':0})
    @get(param1={'type':int })
    @get(param1=('param_name', int, 0))
    @get(param1=(int, 0))
    @get(param1=int)

    A missing required parameter, an undecodable JSON value, or a file
    parameter outside POST short-circuits with a JSON error response.
    """
    def paramed_decorator(func):
        @functools.wraps(func)
        def decorated(*args, **kwargs):
            # Convention: the first positional argument is the request.
            request = args[0]
            # 'param' mode looks in GET first, with POST taking precedence.
            req_param = deepcopy(request.GET)
            req_param.update(request.POST)
            m = {'get': request.GET, 'post': request.POST, 'param': req_param}
            method = m[method_name]
            for k, v in p_kwargs.items():
                # Normalize the many accepted spec shapes into
                # (_name, _type, _default).
                _name = None
                _type = None
                _default = None
                # logging.debug(v)
                if type(v) == str:
                    _type = str
                    _name = v
                elif type(v) == dict:
                    if 'name' in v:
                        _name = v['name']
                    if 'type' in v:
                        _type = v['type']
                    if 'default' in v:
                        _default = v['default']
                elif type(v) == tuple and len(v) == 3:
                    _name = v[0]
                    _type = v[1]
                    _default = v[2]
                elif type(v) == tuple and len(v) == 2:
                    _type = v[0]
                    _default = v[1]
                elif type(v) == type:
                    _type = v
                elif v in (_Type.str_list, _Type.int_list, _Type.json, _Type.file):
                    # Project-specific pseudo-types (lists, json, file upload).
                    _type = v
                if _name is None:
                    _name = k
                if _type is None:
                    _type = str
                has_key = True
                try:
                    if _type == _Type.file:
                        # File uploads only exist in multipart POST bodies.
                        if method_name != 'post':
                            return HttpResponse(
                                json.dumps({'rt': False,
                                            'message': "The file parameter <{}> should in POST method".format(
                                                _name)}, separators=(',', ':')),
                                content_type=CONTENT_TYPE_JSON)
                        origin_v = request.FILES.get(_name, None)
                    else:
                        # getlist joins repeated parameters with commas;
                        # an empty join means the parameter was absent/blank.
                        origin_v = ','.join(method.getlist(_name)).strip()
                        if len(origin_v) == 0:
                            has_key = False
                except KeyError:
                    has_key = False
                if has_key:
                    if _type == bool:
                        # Accept common textual booleans; anything else falls
                        # back to Python truthiness of the raw string.
                        origin_v = origin_v.lower()
                        if origin_v == 'false' or origin_v == '0' or origin_v == 'off':
                            value = False
                        elif origin_v == 'true' or origin_v == 'on':
                            value = True
                        else:
                            value = bool(origin_v)
                    elif _type == _Type.str_list:
                        value = [item for item in origin_v.split(',') if len(item) > 0]
                    elif _type == _Type.int_list:
                        value = [int(item) for item in origin_v.split(',')]
                    elif _type == _Type.json:
                        try:
                            value = json.loads(origin_v)
                        except ValueError:
                            return HttpResponse(
                                json.dumps({'rt': False, 'message': "No JSON object could be decoded"},
                                           separators=(',', ':')),
                                content_type=CONTENT_TYPE_JSON)
                    elif _type == _Type.file:
                        # Pass the uploaded file object through unchanged.
                        value = origin_v
                        pass
                    elif _type == str:
                        value = origin_v
                    else:
                        # Arbitrary callable type, e.g. int or float.
                        value = _type(origin_v)
                else:
                    if _default is not None:
                        value = _default
                    else:
                        # Required (no default) and missing: reject the request.
                        return HttpResponse(
                            json.dumps({'rt': False, 'message': 'Please specify the parameter : ' + _name + ";"},
                                       separators=(',', ':')),
                            content_type=CONTENT_TYPE_JSON)
                kwargs.update({k: value})
            for k in p_args:
                # Positional names are mandatory raw values, passed UTF-8 encoded.
                try:
                    kwargs.update({k: method[k].encode('utf-8')})
                except KeyError:
                    return HttpResponse(json.dumps({'rt': False, 'message': 'Please specify the parameter : ' + k},
                                                   separators=(',', ':')),
                                        content_type=CONTENT_TYPE_JSON)
            return func(*args, **kwargs)
        return decorated
    return paramed_decorator
0
Example 40
Project: syndicate Source File: views.py
@verifyownership_private
@authenticate
def deletevolume(request, volume_id):
    '''
    View for deleting volumes. Since so many other entites have properties related
    to volume ID's, numerous db updates need to be checked. CQ, they are all grouped
    together into the transactional helper method multi_update().

    GET renders the confirmation form; POST verifies the volume password
    and, on success, deletes the volume and scrubs every reference to it.
    '''
    # Clear out volume_id in properties for users, UG's, AG's, and RG's.
    @transactional(xg=True)
    def multi_update(vol, users, usergateways, acquisitiongateways, replicagateways):
        # NOTE: closes over `session`, which is assigned below before this
        # helper is ever called — do not call multi_update earlier.
        v_id = vol.volume_id
        db.delete_volume(v_id)
        logging.info(users)
        for user in users:
            # Strip v_id from whichever access lists (owner/rw/r) contain it,
            # updating only the fields that actually changed.
            fields = {}
            if v_id in user.volumes_o:
                new_volumes_o = user.volumes_o
                new_volumes_o.remove(v_id)
                fields['volumes_o'] = new_volumes_o
            if v_id in user.volumes_rw:
                new_volumes_rw = user.volumes_rw
                new_volumes_rw.remove(v_id)
                fields['volumes_rw'] = new_volumes_rw
            if v_id in user.volumes_r:
                new_volumes_r = user.volumes_r
                new_volumes_r.remove(v_id)
                fields['volumes_r'] = new_volumes_r
            if fields:
                db.update_user(user.email, **fields)
        for ug in usergateways:
            # User gateways reference a single volume; detach it.
            fields = {}
            fields['volume_id'] = 0
            db.update_user_gateway(ug.g_id, **fields)
        for ag in acquisitiongateways:
            logging.info(ag)
            # NOTE(review): list.remove() returns None, so new_ids is always
            # None here and the volume_ids field is always reset to [] —
            # presumably intentional only when v_id was the sole entry; verify.
            fields = {}
            new_ids = ag.volume_ids.remove(v_id)
            if not new_ids:
                fields['volume_ids'] = []
            else:
                fields['volume_ids'] = new_ids
            db.update_acquisition_gateway(ag.g_id, **fields)
        for rg in replicagateways:
            # Same pattern (and same caveat) as acquisition gateways above.
            fields = {}
            new_ids = rg.volume_ids.remove(v_id)
            if not new_ids:
                fields['volume_ids'] = []
            else:
                fields['volume_ids'] = new_ids
            db.update_replica_gateway(rg.g_id, **fields)
        # Clear initial data session variable to prevent stale tables in ag.views.viewgateway and rg.views.viewgateway
        session.pop("rg_initial_data" + str(v_id), None)
        session.pop("ag_initial_data" + str(v_id), None)
        # Clear initial data session variable to prevent stale data in volume settings, change rgs, and change ags.
        session.pop("volume_initial_ags" + str(v_id), None)
        session.pop("volume_initial_rgs" + str(v_id), None)
    session = request.session
    message = session.pop('message', "")
    username = session['login_email']
    vol = db.read_volume( volume_id )
    if not vol:
        return redirect('django_volume.views.viewvolume', volume_id)
    if request.method == "POST":
        form = forms.DeleteVolume(request.POST)
        if form.is_valid():
            # Check password hash
            hash_check = Volume.generate_password_hash(form.cleaned_data['password'], vol.volume_secret_salt)
            if hash_check == vol.volume_secret_salted_hash:
                # Ok to delete
                attrs = {}
                # Gather every entity that references this volume so the
                # transactional helper can scrub them all atomically.
                users = db.list_users({'SyndicateUser.volumes_rw ==':vol.volume_id})
                users.extend(db.list_users({'SyndicateUser.volumes_r ==':vol.volume_id}))
                ags = db.list_acquisition_gateways_by_volume(vol.volume_id)
                rgs = db.list_replica_gateways_by_volume(vol.volume_id)
                ugs = db.list_user_gateways_by_volume(vol.volume_id)
                try:
                    multi_update(vol, users, ugs, ags, rgs)
                except Exception as e:
                    logging.error("Unable to delete volume %s" % e)
                    session['message'] = "Unable to delete volume."
                    return redirect('django_volume.views.deletevolume', volume_id=vol.volume_id)
                session['new_change'] = "We've deleted your volume."
                session['next_url'] = '/syn/volume/myvolumes/'
                session['next_message'] = "Click here to go back to your volumes."
                return redirect('/syn/thanks')
            else:
                session['message'] = "Invalid password"
                return redirect('django_volume.views.deletevolume', volume_id=vol.volume_id)
        else:
            session['message'] = "Please fill out all entries"
            return redirect('django_volume.views.deletevolume', vol.volume_id)
    else:
        # GET: render the delete-confirmation form.
        form = forms.DeleteVolume()
        t = loader.get_template('deletevolume.html')
        c = RequestContext(request, {'username':username, 'form':form, 'message':message,'volume':vol} )
        return HttpResponse(t.render(c))
0
Example 41
Project: zorna Source File: views.py
def edit_page(request):
    """
    Edit a CMS content file in place.

    GET renders the edit form (content blocks plus a YAML context header).
    POST rewrites the file's ``{% block %}`` sections and its ``{% zorna %}``
    YAML header from the submitted fields, either saving in place
    ('save') or writing a temp copy and redirecting to a preview.
    Returns a JSON status payload for saves; empty response if the user
    lacks page-manager access.
    """
    b_pages_manager, b_templates_manager = get_pages_access(request.user)
    if b_pages_manager:
        from zorna.utils import get_context_text
        page = request.REQUEST.get('file', '')
        path_tf = os.path.join(
            settings.PROJECT_PATH, settings.ZORNA_CONTENT, page)
        header, text = get_context_text(path_tf)
        blocks = get_blocks(request, text)
        import yaml
        # NOTE(review): yaml.load without a Loader can execute arbitrary
        # constructors; header comes from a project-owned file, but
        # yaml.safe_load would be the safer choice — confirm.
        context_yaml = yaml.load(header)
        if request.method == 'POST':
            docuement = ''
            try:
                import codecs
                lflr = '\r\n'
                fd = codecs.open(path_tf, "r+", "utf-8")
                text = fd.read()
                to_add = []
                # Form fields prefixed with '__' are content blocks; replace
                # the matching {% block %} section or queue a new one.
                for key, value in request.POST.iteritems():
                    if key[0:2] == '__':
                        docuement = docuement + ' ' + value
                        repl = "%s block %s %s\n%s\n%s endblock %s" % (
                            '{%', key, '%}', value, '{%', '%}')
                        pre = re.compile(r'(%s\s*block\s*%s\s*%s)(.*?)(%s\s*endblock.*?\s*%s)' % (re.escape(
                            '{%'), key, re.escape('%}'), re.escape('{%'), re.escape('%}')), re.M | re.DOTALL)
                        if pre.search(text):
                            text = pre.sub(repl, text)
                        else:
                            to_add.append(repl + lflr)
                # Merge the standard metadata fields into the YAML context.
                description = request.POST.get(
                    "description", '').replace('\n', '')
                tab_ctx = {'title': request.POST.get("title", ''), 'description': description, 'keywords': request.POST.get(
                    "keywords", ''), 'created': str(datetime.datetime.now()), 'author': str(request.user.pk)}
                if not header:
                    context_yaml = tab_ctx
                else:
                    context_yaml.update(tab_ctx)
                # Re-serialize the context as single-quoted YAML lines
                # (single quotes escaped by doubling).
                result = ''
                for k, v in context_yaml.iteritems():
                    if k in request.POST:
                        v = request.POST.get(k, '').replace('\n', '')
                    result = result + k + ": '%s'%s" % (
                        v.replace("'", "''"), lflr)
                ctx = "%s zorna %s%s%s%s" % ('{%', lflr, result, lflr, '%}')
                pre = re.compile(r'(%s\s*zorna)(.*?)(\s*%s)' % (
                    re.escape('{%'), re.escape('%}')), re.M | re.DOTALL)
                if pre.search(text):
                    text = pre.sub(ctx, text)
                else:
                    text = text + lflr + ctx
                what = request.REQUEST.get('what', 'save')
                if what == 'save':
                    # Overwrite the original file in place.
                    fd.seek(0)
                    fd.truncate()
                    fd.write(text)
                    fd.close()
                    zorna_page_save.send(
                        None, created=False, content=docuement, title=request.POST.get("title", page), url=page)
                else:
                    # create temporary file
                    head, tail = os.path.split(page)
                    if head:
                        head = head + '/'
                    temp_page = head + 'temp-%s' % tail
                    path_tempf = os.path.join(
                        settings.PROJECT_PATH, settings.ZORNA_CONTENT, temp_page)
                    fd = open(path_tempf, 'w+')
                    fd.write(text.encode('UTF-8'))
                    fd.close()
                    return HttpResponseRedirect(reverse('preview_page', args=[os.path.splitext(temp_page)[0]]))
            except Exception as e:
                ret = {'status': 'error', 'message': 'Error: %s' % str(e)}
                return HttpResponse(simplejson.dumps(ret))
            ret = {'status': 'success', 'message':
                   'Your changes have been saved successfully.'}
            return HttpResponse(simplejson.dumps(ret))
        # GET: build the block-edit form and, when a YAML header exists,
        # a second form for the remaining context variables.
        form = PageEditFileForm(extra=blocks, request=request)
        if header:
            initial_data = {}
            initial_data['title'] = context_yaml[
                'title'] if 'title' in context_yaml else ''
            initial_data['description'] = context_yaml[
                'description'] if 'description' in context_yaml else ''
            initial_data['keywords'] = context_yaml[
                'keywords'] if 'keywords' in context_yaml else ''
            # Reserved metadata keys get dedicated form fields above.
            for e in ['author', 'created', 'title', 'keywords', 'description']:
                if e in context_yaml:
                    del context_yaml[e]
            form_context = PageEditFileContextForm(
                initial=initial_data, extra=context_yaml)
        else:
            form_context = None
        extra_context = {'form_context': form_context, 'form':
                         form, 'cdir_components': format_components(page), 'template_file': page}
        context = RequestContext(request)
        return render_to_response('pages/fm_edit_file.html', extra_context, context_instance=context)
    else:
        return HttpResponse('')
0
Example 42
Project: authomatic Source File: views.py
def login(request, provider_name):
    """
    Authenticate the user with the OAuth provider named in the URL and
    render a small HTML report of the result.

    On success the page shows the user's name/id/email and, for the 'fb'
    and 'tw' providers, a sample of protected resources fetched with the
    obtained credentials. Returns the (possibly empty) HttpResponse the
    adapter wrote into; nothing is written while the procedure is pending.
    """
    # We need the response object for the adapter.
    response = HttpResponse()
    # Start the login procedure.
    result = authomatic.login(DjangoAdapter(request, response), provider_name)
    # If there is no result, the login procedure is still pending.
    # Don't write anything to the response if there is no result!
    if result:
        # If there is result, the login procedure is over and we can write to response.
        response.write('<a href="..">Home</a>')
        if result.error:
            # Login procedure finished with an error.
            response.write('<h2>Damn that error: {0}</h2>'.format(result.error.message))
        elif result.user:
            # Hooray, we have the user!
            # OAuth 2.0 and OAuth 1.0a provide only limited user data on login,
            # We need to update the user to get more info.
            if not (result.user.name and result.user.id):
                result.user.update()
            # Welcome the user.
            response.write(u'<h1>Hi {0}</h1>'.format(result.user.name))
            response.write(u'<h2>Your id is: {0}</h2>'.format(result.user.id))
            response.write(u'<h2>Your email is: {0}</h2>'.format(result.user.email))
            # Seems like we're done, but there's more we can do...
            # If there are credentials (only by AuthorizationProvider),
            # we can _access user's protected resources.
            if result.user.credentials:
                # Each provider has it's specific API.
                if result.provider.name == 'fb':
                    response.write('Your are logged in with Facebook.<br />')
                    # We will access the user's 5 most recent statuses.
                    url = 'https://graph.facebook.com/{0}?fields=feed.limit(5)'
                    url = url.format(result.user.id)
                    # Access user's protected resource.
                    access_response = result.provider.access(url)
                    if access_response.status == 200:
                        # Parse response.
                        statuses = access_response.data.get('feed').get('data')
                        error = access_response.data.get('error')
                        if error:
                            response.write(u'Damn that error: {0}!'.format(error))
                        elif statuses:
                            response.write('Your 5 most recent statuses:<br />')
                            for message in statuses:
                                text = message.get('message')
                                date = message.get('created_time')
                                response.write(u'<h3>{0}</h3>'.format(text))
                                response.write(u'Posted on: {0}'.format(date))
                    else:
                        response.write('Damn that unknown error!<br />')
                        # BUGFIX: report the status of the provider access call;
                        # the Django HttpResponse has no `.status` attribute.
                        response.write(u'Status: {0}'.format(access_response.status))
                if result.provider.name == 'tw':
                    response.write('Your are logged in with Twitter.<br />')
                    # We will get the user's 5 most recent tweets.
                    url = 'https://api.twitter.com/1.1/statuses/user_timeline.json'
                    # You can pass a dictionary of querystring parameters.
                    access_response = result.provider.access(url, {'count': 5})
                    # Parse response.
                    if access_response.status == 200:
                        if type(access_response.data) is list:
                            # Twitter returns the tweets as a JSON list.
                            response.write('Your 5 most recent tweets:')
                            for tweet in access_response.data:
                                text = tweet.get('text')
                                date = tweet.get('created_at')
                                response.write(u'<h3>{0}</h3>'.format(text))
                                response.write(u'Tweeted on: {0}'.format(date))
                        # BUGFIX: the parsed payload lives on access_response,
                        # not on the Django HttpResponse being built.
                        elif access_response.data.get('errors'):
                            response.write(u'Damn that error: {0}!'.\
                                format(access_response.data.get('errors')))
                    else:
                        response.write('Damn that unknown error!<br />')
                        response.write(u'Status: {0}'.format(access_response.status))
    return response
0
Example 43
Project: django-academicstoday Source File: overview.py
@login_required(login_url='/landpage')
def submit_course_for_review(request, course_id):
    """
    Validate that a course has all required content and, if it does,
    submit it for review and mark it available.

    Requirements checked, in order: at least 1 announcement, a syllabus,
    a policy, at least 2 lectures, at least 1 assignment, 1 quiz and
    1 exam, a final-mark breakdown totalling 100%, and exactly one
    final exam. Returns a JSON payload: {'status': ..., 'message': ...}.
    """
    course = Course.objects.get(id=course_id)

    def _fail(message):
        # Every validation failure shares the same JSON envelope.
        return HttpResponse(
            json.dumps({'status': 'failed', 'message': message}),
            content_type="application/json")

    # Validate announcements
    try:
        announcements = Announcement.objects.filter(course=course).order_by('-post_date')
        if announcements.count() < 1:
            return _fail('zero announcements')
    except Announcement.DoesNotExist:
        return _fail('no announcements detected')
    # Validate syllabus
    try:
        Syllabus.objects.get(course=course)
    except Syllabus.DoesNotExist:
        return _fail('no syllabus set')
    # Validate policy
    try:
        Policy.objects.get(course=course)
    except Policy.DoesNotExist:
        return _fail('no policy set')
    # Validate lectures
    try:
        lectures = Lecture.objects.filter(course=course).order_by('-lecture_num')
        if lectures.count() < 2:
            return _fail('minimum 2 lectures required')
    except Lecture.DoesNotExist:
        # BUGFIX: previously reported 'no policy set' (copy-paste error).
        return _fail('no lecture(s) found')
    # Validate assignments
    try:
        assignments = Assignment.objects.filter(course=course).order_by('-assignment_num')
        if assignments.count() < 1:
            return _fail('minimum 1 assignment required')
    except Assignment.DoesNotExist:
        return _fail('no assignment(s)')
    # Validate quizzes
    try:
        quizzes = Quiz.objects.filter(course=course).order_by('-quiz_num')
        if quizzes.count() < 1:
            return _fail('minimum 1 quiz required')
    except Quiz.DoesNotExist:
        return _fail('no quiz(zes) found')
    # Validate exams
    try:
        exams = Exam.objects.filter(course=course).order_by('-exam_num')
        if exams.count() < 1:
            return _fail('minimum 1 exam required')
    except Exam.DoesNotExist:
        return _fail('no exams(s) found')
    # Validate final mark calculator
    total_worth = total_final_mark_worth(course)
    if total_worth != 100:
        return _fail('total final mark must add up to 100%')
    # Make sure we have a final exam
    if not has_final_exam(exams):
        return _fail('course requires only 1 final exam')
    # All checks passed: record the submission. (objects.create already
    # persists the row, so no extra save() call is needed.)
    CourseSubmission.objects.create(
        course=course,
    )
    # Make course available.
    course.status = settings.COURSE_AVAILABLE_STATUS
    course.save()
    response_data = {'status' : 'success', 'message' : 'submitted course review'}
    return HttpResponse(json.dumps(response_data), content_type="application/json")
0
Example 44
Project: jaikuenginepatch Source File: views.py
@alternate_nick
def actor_settings(request, nick, page='index'):
    """ just a static page that links to the rest

    Renders settings_<page>.html for the actor `nick`, preparing whichever
    locals that sub-page's template needs (email, im_address, badges, ...).

    WARNING: the template context is built from locals() at the bottom of
    this function — every local variable assigned here is deliberately
    exposed to the template, so do not rename or remove locals casually.
    """
    nick = clean.nick(nick)
    view = api.actor_lookup_nick(api.ROOT, nick)
    # Only the owner may view or change these settings.
    if not api.actor_owns_actor(request.user, view):
        raise exception.ApiException(exception.PRIVACY_ERROR,
                                     'Operation not allowed')
    # Generic POST-action dispatch shared with other views; returns a
    # response when it handled the request.
    handled = common_views.handle_view_action(
        request,
        {
            'activation_activate_mobile': view.url('/settings/mobile'),
            'activation_request_email': view.url('/settings/email'),
            'activation_request_mobile': view.url('/settings/mobile'),
            'settings_change_notify': view.url('/settings/notifications'),
            'settings_change_privacy': request.path,
            'settings_update_account': view.url('/settings/profile'),
            'actor_remove': '/logout',
            #'oauth_remove_consumer': request.path,
            #'oauth_remove_access_token': request.path
        }
    )
    if handled:
        return handled
    # TODO(tyler/termie): This conflicts with the global settings import.
    # Also, this seems fishy. Do none of the settings.* items work in templates?
    import settings
    # TODO(tyler): Merge this into handle_view_action, if possible
    if 'password' in request.POST:
        try:
            validate.nonce(request, 'change_password')
            password = request.POST.get('password', '')
            confirm = request.POST.get('confirm', '')
            validate.password_and_confirm(password, confirm, field = 'password')
            api.settings_change_password(request.user, view.nick, password)
            response = util.RedirectFlash(view.url() + '/settings/password',
                                          'Password updated')
            request.user.password = util.hash_password(request.user.nick, password)
            # TODO(mikie): change when cookie-auth is changed
            user.set_user_cookie(response, request, request.user)
            return response
        except:
            # Validation errors are stashed on the request for re-display.
            exception.handle_exception(request)
    if page == 'feeds':
        try:
            if not settings.FEEDS_ENABLED:
                raise exception.DisabledFeatureError('Feeds are currently disabled')
        except:
            exception.handle_exception(request)
    if page == 'photo':
        redirect_to = view.url() + '/settings/photo'
        handled = common_views.common_photo_upload(request, redirect_to)
        if handled:
            return handled
    # The remaining locals are page-specific template context (see WARNING).
    area = 'settings'
    full_page = page.capitalize()
    if page == 'mobile':
        full_page = 'Mobile Number'
        mobile = api.mobile_get_actor(request.user, view.nick)
        sms_notify = view.extra.get('sms_notify', False)
    elif page == 'im':
        full_page = 'IM Address'
        im_address = api.im_get_actor(request.user, view.nick)
        im_notify = view.extra.get('im_notify', False)
    elif page == 'index':
        email = api.email_get_actor(request.user, view.nick)
        email_notify = view.extra.get('email_notify', False)
        im_address = api.im_get_actor(request.user, view.nick)
        im_notify = view.extra.get('im_notify', False)
    elif page == 'feeds':
        full_page = 'Web Feeds'
    elif page == 'email':
        full_page = 'Email Address'
        email_notify = view.extra.get('email_notify', False)
        # check if we already have an email
        email = api.email_get_actor(request.user, view.nick)
        # otherwise look for an unconfirmed one
        if not email:
            unconfirmeds = api.activation_get_actor_email(api.ROOT, view.nick)
            if unconfirmeds:
                unconfirmed_email = unconfirmeds[0].content
    elif page == 'design':
        handled = common_views.common_design_update(request, view.nick)
        if handled:
            return handled
        full_page = 'Look and Feel'
    elif page == 'notifications':
        email = api.email_get_actor(request.user, view.nick)
        email_notify = view.extra.get('email_notify', False)
        im_address = api.im_get_actor(request.user, view.nick)
        im_notify = view.extra.get('im_notify', False)
        mobile = api.mobile_get_actor(request.user, request.user.nick)
        sms_notify = view.extra.get('sms_notify', False)
        sms_confirm = sms_notify and not view.extra.get('sms_confirmed', False)
        # TODO(termie): remove this once we can actually receive sms
        sms_confirm = False
    elif page == 'profile':
        # check if we already have an email
        email = api.email_get_actor(request.user, view.nick)
        # otherwise look for an unconfirmed one
        if not email:
            unconfirmeds = api.activation_get_actor_email(api.ROOT, view.nick)
            if unconfirmeds:
                unconfirmed_email = unconfirmeds[0].content
    elif page == 'photo':
        avatars = display.DEFAULT_AVATARS
        small_photos = api.image_get_all_keys(request.user, view.nick, size='f')
        # TODO(tyler): Fix this avatar nonsense!
        own_photos = [{
            'path' : small_photo.key().name(),
            'name' : small_photo.key().name()[len('image/'):-len('_f.jpg')],
            } for small_photo in small_photos
        ]
    elif page == 'privacy':
        PRIVACY_PUBLIC = api.PRIVACY_PUBLIC
        PRIVACY_CONTACTS = api.PRIVACY_CONTACTS
    elif page == 'jsbadge':
        full_page = 'Javascript Badges'
    elif page == 'badge':
        badges = [{'id': 'badge-stream',
                   'width': '200',
                   'height': '300',
                   'src': '%sglobal/themes/%s/badge.swf' % (settings.MEDIA_URL, settings.DEFAULT_THEME),
                   'title': 'Stream',
                   },
                  {'id': 'badge-map',
                   'width': '200',
                   'height': '255',
                   'src': '%sglobal/themes/%s/badge-map.swf' % (settings.MEDIA_URL, settings.DEFAULT_THEME),
                   'title': 'Map',
                   },
                  {'id': 'badge-simple',
                   'width': '200',
                   'height': '200',
                   'src': '%sglobal/themes/%s/badge-simple.swf' % (settings.MEDIA_URL, settings.DEFAULT_THEME),
                   'title': 'Simple',
                   },
                  ]
    elif page in ['password', 'delete']:
        # Catch for remaining pages before we generate a 404.
        pass
    else:
        return common_views.common_404(request)
    # rendering
    c = template.RequestContext(request, locals())
    t = loader.get_template('settings_%s.html' % page)
    return http.HttpResponse(t.render(c))
0
Example 45
Project: django-adminactions Source File: api.py
def export_as_xls3(queryset, fields=None, header=None,  # noqa
                   filename=None, options=None, out=None):  # pragma: no cover
    """
    Exports a queryset as xlsx from a queryset with the given fields.

    :param queryset: queryset to export (can also be list of namedtuples)
    :param fields: list of fields names to export. None for all fields
    :param header: if True, the exported file will have the first row as column names
    :param filename: download filename when an HttpResponse is returned
    :param options: dict merged over the default xlsxwriter options
    :param out: object that implements File protocol.
    :return: HttpResponse instance if out not supplied, otherwise out
    """
    import io
    import xlsxwriter

    def _get_qs_formats(queryset):
        # Build an xlsxwriter format per exported model field, keyed by
        # field name; '_general_' is the fallback format.
        formats = {'_general_': book.add_format()}
        if hasattr(queryset, 'model'):
            for i, fieldname in enumerate(fields):
                try:
                    f, __, __, __, = queryset.model._meta.get_field_by_name(fieldname)
                    pattern = xlsxwriter_options.get(f.name, xlsxwriter_options.get(f.__class__.__name__, 'general'))
                    fmt = book.add_format({'num_format': pattern})
                    formats[fieldname] = fmt
                except FieldDoesNotExist:
                    pass
        return formats

    http_response = out is None
    if out is None:
        # BUGFIX: xlsxwriter produces binary xlsx data, so the in-memory
        # buffer must be bytes-based; the previous StringIO broke on Python 3.
        out = io.BytesIO()
    config = xlsxwriter_options.copy()
    if options:
        config.update(options)
    if fields is None:
        fields = [f.name for f in queryset.model._meta.fields]
    book = xlsxwriter.Workbook(out, {'in_memory': True})
    sheet_name = config.pop('sheet_name')
    use_display = config.get('use_display', False)
    sheet = book.add_worksheet(sheet_name)
    # BUGFIX: the workbook previously had book.close() called here, which
    # finalized the file before any row was written and discarded all output.
    formats = _get_qs_formats(queryset)
    row = 0
    # First column is a 1-based row counter headed '#'.
    sheet.write(row, 0, force_text('#'), formats['_general_'])
    if header:
        if not isinstance(header, (list, tuple)):
            header = [force_text(f.verbose_name) for f in queryset.model._meta.fields if f.name in fields]
        for col, fieldname in enumerate(header, start=1):
            sheet.write(row, col, force_text(fieldname), formats['_general_'])
    settingstime_zone = get_default_timezone()
    for rownum, row in enumerate(queryset):
        sheet.write(rownum + 1, 0, rownum + 1)
        for idx, fieldname in enumerate(fields):
            fmt = formats.get(fieldname, formats['_general_'])
            value = get_field_value(row,
                                    fieldname,
                                    usedisplay=use_display,
                                    raw_callable=False)
            if callable(fmt):
                value = fmt(value)
            if isinstance(value, (list, tuple)):
                value = smart_text(u"".join(value))
            if isinstance(value, datetime.datetime):
                try:
                    value = dateformat.format(value.astimezone(settingstime_zone), config['datetime_format'])
                except ValueError:
                    # Naive datetimes cannot be converted to another timezone.
                    value = dateformat.format(value, config['datetime_format'])
            if isinstance(value, six.binary_type):
                value = smart_text(value)
            sheet.write(rownum + 1, idx + 1, smart_text(value), fmt)
    book.close()
    out.seek(0)
    if http_response:
        if filename is None:
            # BUGFIX: the payload is xlsx, so default to an .xlsx extension.
            filename = "%s.xlsx" % queryset.model._meta.verbose_name_plural.lower().replace(" ", "_")
        # BUGFIX: corrected MIME type typo ('officedocuement').
        response = HttpResponse(out.read(),
                                content_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
        response['Content-Disposition'] = six.b('attachment;filename="%s"' % filename)
        return response
    return out
0
Example 46
Project: talk.org Source File: list_detail.py
def object_list(request, queryset, paginate_by=None, page=None,
                allow_empty=True, template_name=None, template_loader=loader,
                extra_context=None, context_processors=None, template_object_name='object',
                mimetype=None):
    """
    Render a generic list of objects.

    Templates: ``<app_label>/<model_name>_list.html``

    The template context contains ``<template_object_name>_list`` plus,
    when ``paginate_by`` is given, ``paginator``/``page_obj`` and the
    legacy pagination keys (``is_paginated``, ``results_per_page``,
    ``has_next``, ``has_previous``, ``page``, ``next``, ``previous``,
    ``first_on_page``, ``last_on_page``, ``pages``, ``hits``,
    ``page_range``). Raises Http404 for an invalid page, or for an empty
    queryset when ``allow_empty`` is false.
    """
    if extra_context is None:
        extra_context = {}
    queryset = queryset._clone()
    list_key = '%s_list' % template_object_name
    if paginate_by:
        pager = QuerySetPaginator(queryset, paginate_by, allow_empty_first_page=allow_empty)
        if not page:
            page = request.GET.get('page', 1)
        try:
            page_number = int(page)
        except ValueError:
            # Only the literal 'last' is accepted as a non-numeric page.
            if page != 'last':
                raise Http404
            page_number = pager.num_pages
        try:
            page_obj = pager.page(page_number)
        except InvalidPage:
            raise Http404
        context_data = {
            list_key: page_obj.object_list,
            'paginator': pager,
            'page_obj': page_obj,
            # Legacy template context stuff. New templates should use page_obj
            # to access this instead.
            'is_paginated': page_obj.has_other_pages(),
            'results_per_page': pager.per_page,
            'has_next': page_obj.has_next(),
            'has_previous': page_obj.has_previous(),
            'page': page_obj.number,
            'next': page_obj.next_page_number(),
            'previous': page_obj.previous_page_number(),
            'first_on_page': page_obj.start_index(),
            'last_on_page': page_obj.end_index(),
            'pages': pager.num_pages,
            'hits': pager.count,
            'page_range': pager.page_range,
        }
        ctx = RequestContext(request, context_data, context_processors)
    else:
        ctx = RequestContext(request, {
            list_key: queryset,
            'paginator': None,
            'page_obj': None,
            'is_paginated': False,
        }, context_processors)
    if not allow_empty and len(queryset) == 0:
        raise Http404
    # Callables in extra_context are evaluated lazily, at render time.
    for key, value in extra_context.items():
        ctx[key] = value() if callable(value) else value
    if not template_name:
        meta = queryset.model._meta
        template_name = "%s/%s_list.html" % (meta.app_label, meta.object_name.lower())
    tmpl = template_loader.get_template(template_name)
    return HttpResponse(tmpl.render(ctx), mimetype=mimetype)
0
Example 47
Project: fileshackproject Source File: views.py
@never_cache
@require_store
@require_login
def upload(request, store, id):
    """
    Receive a file upload (possibly one chunk of a larger transfer)
    into *store*, creating or appending to the Item with primary key *id*.

    Optional request headers understood from the client:
      X-File-Size     -- total size in bytes of the file being transferred
      X-File-Offset   -- byte offset of this chunk within the file
      X-File-Encoding -- "base64" if the chunk body is base64-encoded

    Returns a JSON-encoded status payload; error cases are wrapped in
    HttpResponseBadRequest / HttpResponseServerError.
    """
    # Only multipart POSTs carrying a "file" part are acceptable.
    if request.method != "POST" or not request.FILES.has_key("file"):
        data = {
            "status": "failed",
            "error_label": "Upload failed",
            "error_message": "Invalid HTTP request",
        }
        return HttpResponseBadRequest(JSONEncoder().encode(data))
    # The guard above guarantees the "file" part exists, so the former
    # `else` branch here (which read X-File-Name and left `f` undefined
    # for the chunk loop below) was unreachable dead code; it has been
    # removed.
    f = request.FILES["file"]
    name = urllib.unquote(f.name)
    try:
        size_total = int(request.META["HTTP_X_FILE_SIZE"])
    except (ValueError, KeyError):
        # No (or malformed) declared total: assume this request carries
        # the whole file.
        size_total = f.size
    # Strip any directory components the client may have sent.
    name = os.path.basename(name)
    try:
        offset = int(request.META["HTTP_X_FILE_OFFSET"])
    except (ValueError, KeyError):
        offset = 0
    # Enforce the per-item size limit (limits are configured in MB).
    if store.item_limit and size_total and size_total > store.item_limit*1024*1024:
        data = {
            "status": "itemlimitreached",
            "error_label": "Upload failed",
            "error_message": "Item size is limited to %d MB" % store.item_limit,
            "item": None,
        }
        return HttpResponseServerError(JSONEncoder().encode(data))
    # Enforce the whole-store limit; the first `offset` bytes of this
    # item are already counted inside store.total(), hence the subtraction.
    if store.store_limit and size_total and store.total() + size_total - offset > store.store_limit*1024*1024:
        data = {
            "status": "storelimitreached",
            "error_label": "Upload failed",
            "error_message": "The store size limit of %d MB has been reached" % store.store_limit,
            "item": None,
        }
        return HttpResponseServerError(JSONEncoder().encode(data))
    # If the item exists, open the file for append.
    try:
        try:
            id = int(id)
        except ValueError:
            raise Item.DoesNotExist
        item = Item.objects.get(pk=id)
        # Chunks must arrive in order: the file on disk can never be
        # shorter than the offset the client claims to continue from.
        if item.fileobject.size < offset:
            data = {
                "status": "outoforder",
                "error_label": "Chunk out of order",
                "error_message": "Application sent a chunk out of order",
                "item": item.simple(),
            }
            return HttpResponseServerError(JSONEncoder().encode(data))
        fp = default_storage.open(item.fileobject.path, "ab")
        # Drop any bytes past the resume point so a re-sent chunk
        # overwrites instead of duplicating.
        fp.truncate(offset)
    # This is a new item.
    except Item.DoesNotExist:
        if offset != 0:
            data = {
                "status": "outoforder",
                "error_label": "Chunk out of order",
                "error_message": "Application sent a chunk of an item that does not exist",
                "item": None,
            }
            return HttpResponseServerError(JSONEncoder().encode(data))
        item = Item()
        item.store = store
        # Create an empty backing file first so fileobject.path exists.
        item.fileobject.save(name, ContentFile(""))
        item.fileobject.close()
        item.size_total = size_total
        item.save()
        fp = default_storage.open(item.fileobject.path, "wb")
    # Stream the uploaded chunks onto disk.
    chunks = f.chunks().__iter__()
    while True:
        try:
            chunk = chunks.next()
        except StopIteration:
            break
        except IOError:
            fp.close()
            data = {
                "status": "failed",
                "error_label": "Upload failed",
                "error_message": "Server-side I/O error",
                "item": item.simple(),
            }
            return HttpResponseServerError(JSONEncoder().encode(data))
        else:
            try:
                if request.META.get("HTTP_X_FILE_ENCODING") == "base64":
                    # Python 2 str codec; raises binascii.Error on bad input.
                    fp.write(chunk.decode("base64"))
                else:
                    fp.write(chunk)
            except binascii.Error:
                fp.close()
                data = {
                    "status": "failed",
                    "error_label": "Upload failed",
                    "error_message": "The browser sent an invalid chunk",
                    "item": item.simple(),
                }
                return HttpResponseServerError(JSONEncoder().encode(data))
    item.size = fp.tell()
    fp.close()
    # Reconcile the declared total with what actually arrived, and mark
    # the item uploaded once all bytes are in.
    if item.size_total < item.size:
        item.size_total = item.size
    if item.size >= item.size_total:
        item.uploaded = timezone.now()
    item.save()
    data = {
        "status": "success",
        "item": Item.objects.get(pk=item.pk).simple()
    }
    return HttpResponse(JSONEncoder().encode(data))
Example 48 (0 votes)
@vary_on_headers('Authorization')
def __call__(self, request, *args, **kwargs):
    """
    Dispatch an incoming HTTP request to the matching handler method
    and serialize the result through an emitter.

    NB: Sends a `Vary` header so we don't cache requests
    that are different (OAuth stuff in `Authorization` header.)
    """
    rm = request.method.upper()
    # Django's internal mechanism doesn't pick up
    # PUT request, so we trick it a little here.
    if rm == "PUT":
        coerce_put_post(request)
    # NOTE(review): authenticate() appears to return either the handler
    # plus an anonymous flag, or a challenge callable paired with the
    # CHALLENGE sentinel (auth failure) -- confirm against its definition.
    actor, anonymous = self.authenticate(request, rm)
    if anonymous is CHALLENGE:
        return actor()
    else:
        handler = actor
    # Translate nested datastructs into `request.data` here.
    if rm in ('POST', 'PUT'):
        try:
            translate_mime(request)
        except MimerDataException:
            return rc.BAD_REQUEST
        if not hasattr(request, 'data'):
            # No mimer matched: fall back to the form-encoded payload.
            if rm == 'POST':
                request.data = request.POST
            else:
                request.data = request.PUT
    if not rm in handler.allowed_methods:
        return HttpResponseNotAllowed(handler.allowed_methods)
    # callmap translates the HTTP verb into the handler's method name.
    meth = getattr(handler, self.callmap.get(rm, ''), None)
    if not meth:
        raise Http404
    # Support emitter both through (?P<emitter_format>) and ?format=emitter.
    em_format = self.determine_emitter(request, *args, **kwargs)
    kwargs.pop('emitter_format', None)
    # Clean up the request object a bit, since we might
    # very well have `oauth_`-headers in there, and we
    # don't want to pass these along to the handler.
    request = self.cleanup_request(request)
    try:
        result = meth(request, *args, **kwargs)
    except Exception, e:
        # Any handler failure is delegated to the configurable error handler.
        result = self.error_handler(e, request, meth, em_format)
    try:
        emitter, ct = Emitter.get(em_format)
        fields = handler.fields
        # list_fields lets a handler expose a different field set for
        # collection results than for single objects.
        if hasattr(handler, 'list_fields') and isinstance(result, (list, tuple, QuerySet)):
            fields = handler.list_fields
    except ValueError:
        # Emitter.get raised: the requested output format is unknown.
        result = rc.BAD_REQUEST
        result.content = "Invalid output format specified '%s'." % em_format
        return result
    status_code = 200
    # If we're looking at a response object which contains non-string
    # content, then assume we should use the emitter to format that
    # content
    if isinstance(result, HttpResponse) and not result._is_string:
        status_code = result.status_code
        # Note: We can't use result.content here because that method attempts
        # to convert the content into a string which we don't want.
        # when _is_string is False _container is the raw data
        result = result._container
    srl = emitter(result, typemapper, handler, fields, anonymous)
    try:
        """
        Decide whether or not we want a generator here,
        or we just want to buffer up the entire result
        before sending it to the client. Won't matter for
        smaller datasets, but larger will have an impact.
        """
        if self.stream: stream = srl.stream_render(request)
        else: stream = srl.render(request)
        if not isinstance(stream, HttpResponse):
            resp = HttpResponse(stream, mimetype=ct, status=status_code)
        else:
            resp = stream
        resp.streaming = self.stream
        return resp
    except HttpStatusCode, e:
        # Serialization signalled a specific HTTP status; return it as-is.
        return e.response
Example 49 (0 votes)
Project: coursys — Source File: search.py
@requires_role("GRAD", get_only=["GRPD"])
def search(request):
    """
    Graduate-student advanced search view.

    With an empty query string (or with 'edit_search' present, or an
    invalid form) it renders the search form; otherwise it runs the
    search and renders the results.  Extra GET flags select an export
    format instead of HTML: 'csv', 'excel', 'cardforms' (PDF), or
    'fasnetforms' (PDF).
    """
    current_user = Person.objects.get(userid=request.user.username)
    query_string = request.META.get('QUERY_STRING','')
    # A saved search is keyed on the exact query string for this user.
    savedsearches = SavedSearch.objects.filter(person=current_user, query=query_string)
    if savedsearches:
        savedsearch = savedsearches[0]
    else:
        savedsearch = None
    # Empty GET: fresh form defaulting to active students; otherwise bind it.
    form = SearchForm(initial={'student_status': STATUS_ACTIVE}) if len(request.GET) == 0 else SearchForm(request.GET)
    requirement_choices = [(r['series'], r['description']) for r in
                           GradRequirement.objects.filter(program__unit__in=request.units, hidden=False)
                           .order_by('description').values('series', 'description').distinct()]
    scholarshiptype_choices = [(st.id, st.name) for st in ScholarshipType.objects.filter(unit__in=request.units, hidden=False)]
    # If the user has the grad role for more than one unit, append the unit label to the name of the program so
    # they know which one they are looking at.
    if len(request.units) > 1:
        program_choices = [(gp.id, "%s - %s" % (gp.unit.label, gp.label)) for gp in
                           GradProgram.objects.filter(unit__in=request.units, hidden=False)]
    else:
        program_choices = [(gp.id, gp.label) for gp in GradProgram.objects.filter(unit__in=request.units, hidden=False)]
    status_choices = [(st,desc) for st,desc in STATUS_CHOICES if st not in STATUS_OBSOLETE] + [('', 'None')]
    # Senior supervisors only; de-duplicate via a set, then sort for display.
    supervisors = Supervisor.objects.filter(student__program__unit__in=request.units, supervisor_type='SEN',
                                            removed=False).select_related('supervisor')
    supervisors = set((s.supervisor for s in supervisors if s.supervisor))
    supervisors = list(supervisors)
    supervisors.sort()
    supervisor_choices = [(p.id, p.sortname()) for p in supervisors]
    grad_flags = GradFlag.objects.filter(unit__in=request.units)
    grad_flag_choices = [(g.id, g.label) for g in grad_flags]
    # The choice lists depend on the requesting user's units, so they
    # are injected into the form at request time rather than declared
    # statically on the form class.
    form.fields['requirements'].choices = requirement_choices
    form.fields['incomplete_requirements'].choices = requirement_choices
    form.fields['scholarshiptype'].choices = scholarshiptype_choices
    form.fields['program'].choices = program_choices
    form.fields['student_status'].choices = status_choices
    form.fields['supervisor'].choices = supervisor_choices
    form.fields['grad_flags'].choices = grad_flag_choices
    if 'sort' in request.GET:
        sort = _parse_sort(request.GET['sort'])
    else:
        sort = None;
    if 'edit_search' not in request.GET and form.is_valid():
        grads = form.search_results(request.units)
        overflow = False
        # Cap the result set; remember that we truncated so we can warn below.
        if len(grads) > MAX_RESULTS:
            grads = grads[:MAX_RESULTS]
            overflow = True
        if savedsearch is not None:
            saveform = SaveSearchForm(instance=savedsearch)
        else:
            saveform = SaveSearchForm(initial={'person':current_user, 'query':query_string})
        columns = form.cleaned_data['columns']
        # Here, we're using a nested list comprehension to convert column ids into column names -
        # for example 'person.first_name' into 'First Name' - using the COLUMN_CHOICES table provided in forms.py
        human_readable_column_headers = [[v[1] for _,v in enumerate(COLUMN_CHOICES) if v[0] == column][0] for column in columns]
        if 'csv' in request.GET:
            # CSV output
            response = HttpResponse(content_type='text/csv')
            response['Content-Disposition'] = 'inline; filename="grad_search.csv"'
            _generate_csv(response, columns, human_readable_column_headers, grads)
            return response
        elif 'excel' in request.GET:
            # Excel output
            response = HttpResponse(content_type='application/vnd.ms-excel')
            response['Content-Disposition'] = 'inline; filename="grad_search.xls"'
            _generate_excel(response, columns, human_readable_column_headers, grads)
            return response
        elif 'cardforms' in request.GET:
            # access card requisition output
            response = HttpResponse(content_type='application/pdf')
            response['Content-Disposition'] = 'inline; filename="card_access.pdf"'
            card_req_forms(grads, response)
            return response
        elif 'fasnetforms' in request.GET:
            # access card requisition output
            response = HttpResponse(content_type='application/pdf')
            response['Content-Disposition'] = 'inline; filename="fasnet_access.pdf"'
            fasnet_forms(grads, response)
            return response
        if overflow:
            messages.warning(request, "Too many result found: limited to %i." % (MAX_RESULTS))
        context = {
            'grads': grads,
            'human_readable_column_headers': human_readable_column_headers,
            'columns': columns,
            'saveform' : saveform,
            'query_string': query_string,
            'sort': sort,
            'uses_fasnet': any(u.uses_fasnet() for u in request.units),
        }
        resp = render(request, 'grad/search_results.html', context)
        return resp
    else:
        #savedsearches = SavedSearch.objects.filter(person__in=(current_user,None))
        page_title = 'Graduate Student Advanced Search'
        context = {
            #'savedsearches' : savedsearches,
            'page_title' : page_title,
            'form':form,
            'savedsearch' : savedsearch,
            # a non-None savedsearch here means that somehow, an invalid search got saved
            # the template gives the user the option to delete it
        }
        resp = render(request, 'grad/search.html', context)
        return resp
Example 50 (0 votes)
Project: ion — Source File: attendance.py
@eighth_admin_required
def delinquent_students_view(request):
    """
    Report students whose eighth-period absence count falls in a range.

    GET parameters: 'lower'/'upper' bound the absence count,
    'freshmen'/'sophumores'/'juniors'/'seniors' toggle grade levels
    (the 'sophumores' spelling is part of the existing query API), and
    'start'/'end' bound the date range (YYYY-MM-DD).  Renders an HTML
    report, or a CSV attachment when reached via the CSV URL.
    """
    lower_absence_limit = request.GET.get("lower", "")
    upper_absence_limit = request.GET.get("upper", "")
    include_freshmen = (request.GET.get("freshmen", "off") == "on")
    include_sophumores = (request.GET.get("sophumores", "off") == "on")
    include_juniors = (request.GET.get("juniors", "off") == "on")
    include_seniors = (request.GET.get("seniors", "off") == "on")
    # With no query string at all, default to including every grade.
    if not request.META["QUERY_STRING"]:
        include_freshmen = True
        include_sophumores = True
        include_juniors = True
        include_seniors = True
    start_date = request.GET.get("start", "")
    end_date = request.GET.get("end", "")
    # Non-numeric limits fall back to defaults (1..1000); the displayed
    # form value is blanked so the user sees what was actually applied.
    if not lower_absence_limit.isdigit():
        lower_absence_limit = ""
        lower_absence_limit_filter = 1
    else:
        lower_absence_limit_filter = lower_absence_limit
    if not upper_absence_limit.isdigit():
        upper_absence_limit = ""
        upper_absence_limit_filter = 1000
    else:
        upper_absence_limit_filter = upper_absence_limit
    # Unparseable dates widen the filter to the widest possible range.
    try:
        start_date = datetime.strptime(start_date, "%Y-%m-%d")
        start_date_filter = start_date
    except ValueError:
        start_date = ""
        start_date_filter = date(MINYEAR, 1, 1)
    try:
        end_date = datetime.strptime(end_date, "%Y-%m-%d")
        end_date_filter = end_date
    except ValueError:
        end_date = ""
        end_date_filter = date(MAXYEAR, 12, 31)
    context = {
        "lower_absence_limit": lower_absence_limit,
        "upper_absence_limit": upper_absence_limit,
        "include_freshmen": include_freshmen,
        "include_sophumores": include_sophumores,
        "include_juniors": include_juniors,
        "include_seniors": include_seniors,
        "start_date": start_date,
        "end_date": end_date
    }
    query_params = ["lower", "upper", "freshmen", "sophumores", "juniors", "seniors", "start", "end"]
    # Only run the (expensive) aggregation queries when at least one
    # recognized filter parameter was supplied.
    if set(request.GET.keys()).intersection(set(query_params)):
        # attendance MUST have been taken on the activity for the absence to be valid
        non_delinquents = []
        delinquents = []
        # A limit of 0 means "include students with zero absences": those
        # students have no EighthSignup rows to aggregate, so compute the
        # complement of everyone who has at least one absence.
        if int(upper_absence_limit_filter) == 0 or int(lower_absence_limit_filter) == 0:
            users_with_absence = (EighthSignup.objects.filter(
                was_absent=True, scheduled_activity__attendance_taken=True, scheduled_activity__block__date__gte=start_date_filter,
                scheduled_activity__block__date__lte=end_date_filter).values("user").annotate(absences=Count("user")).filter(absences__gte=1)
                .values("user", "absences").order_by("user"))
            uids_with_absence = [row["user"] for row in users_with_absence]
            all_students = User.objects.get_students().values_list("id")
            uids_all_students = [row[0] for row in all_students]
            uids_without_absence = set(uids_all_students) - set(uids_with_absence)
            users_without_absence = User.objects.filter(id__in=uids_without_absence).order_by("id")
            non_delinquents = []
            for usr in users_without_absence:
                non_delinquents.append({"absences": 0, "user": usr})
            logger.debug(non_delinquents)
        if int(upper_absence_limit_filter) > 0:
            # Aggregate absences per user within the limits and date range.
            delinquents = (EighthSignup.objects.filter(
                was_absent=True, scheduled_activity__attendance_taken=True, scheduled_activity__block__date__gte=start_date_filter,
                scheduled_activity__block__date__lte=end_date_filter).values("user").annotate(absences=Count("user"))
                .filter(absences__gte=lower_absence_limit_filter,
                        absences__lte=upper_absence_limit_filter).values("user", "absences").order_by("user"))
            user_ids = [d["user"] for d in delinquents]
            # Replace the raw user ids with User objects; both sequences are
            # ordered by user id, so positions line up.
            delinquent_users = User.objects.filter(id__in=user_ids).order_by("id")
            for index, user in enumerate(delinquent_users):
                delinquents[index]["user"] = user
            logger.debug(delinquents)
        delinquents = list(delinquents)
        delinquents += non_delinquents

        def filter_by_grade(delinquent):
            # Keep only students in the grade levels the admin selected.
            grade = delinquent["user"].grade.number
            include = False
            if include_freshmen:
                include |= (grade == 9)
            if include_sophumores:
                include |= (grade == 10)
            if include_juniors:
                include |= (grade == 11)
            if include_seniors:
                include |= (grade == 12)
            return include
        delinquents = list(filter(filter_by_grade, delinquents))
        # most absences at top
        delinquents = sorted(delinquents, key=lambda x: (-1 * x["absences"], x["user"].last_name))
        logger.debug(delinquents)
    else:
        delinquents = None
    context["delinquents"] = delinquents
    if request.resolver_match.url_name == "eighth_admin_view_delinquent_students":
        context["admin_page_title"] = "Delinquent Students"
        return render(request, "eighth/admin/delinquent_students.html", context)
    else:
        # CSV export endpoint: same data, serialized as an attachment.
        response = http.HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = "attachment; filename=\"delinquent_students.csv\""
        writer = csv.writer(response)
        writer.writerow(["Start Date", "End Date", "Absences", "Last Name", "First Name", "Student ID", "Grade", "Counselor", "TJ Email",
                         "Other Email"])
        for delinquent in delinquents:
            row = []
            row.append(str(start_date).split(" ", 1)[0])
            row.append(str(end_date).split(" ", 1)[0])
            row.append(delinquent["absences"])
            row.append(delinquent["user"].last_name)
            row.append(delinquent["user"].first_name)
            row.append(delinquent["user"].student_id)
            row.append(delinquent["user"].grade.number)
            counselor = delinquent["user"].counselor
            row.append(counselor.last_name if counselor else "")
            row.append("{}".format(delinquent["user"].tj_email))
            row.append(delinquent["user"].emails[0] if delinquent["user"].emails and len(delinquent["user"].emails) > 0 else "")
            writer.writerow(row)
        return response