Here are examples of the Python API `decimal.Decimal`, taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.
149 Examples
3
Example 1
Project: ec2-cost-tools Source File: __main__.py
@click.command()
@click.argument('region', nargs=1)
@click.option('--show-expirations/--no-show-expirations', help='Show future RI expirations')
def main(region, show_expirations=False):
    """Report EC2 reserved-instance coverage and monthly costs for *region*.

    Prints a per-instance cost table, a table of reserved instances that are
    not in use, optionally a table of imminent RI expirations, and a summary
    comparing actual cost against an all-on-demand baseline.
    """
    # On-demand price tables for current and previous instance generations,
    # each mapped region -> instance type -> price info.
    od_price_table = get_price_table(LINUX_ON_DEMAND_PRICE_URL)
    od_price_mapping = price_table_to_price_mapping(od_price_table)
    pre_od_price_table = get_price_table(LINUX_ON_DEMAND_PREVIOUS_GEN_PRICE_URL)
    pre_od_price_mapping = price_table_to_price_mapping(pre_od_price_table)
    # mapping from instance type to price info; merge previous-generation
    # prices in so older instance types still resolve
    od_prices = od_price_mapping[region]
    pre_prices = pre_od_price_mapping[region]
    od_prices.update(pre_prices)
    # we assume there are 30 days in a month
    month_hours = 30 * 24
    # ec2 instance total cost per month  (typo "totoal" fixed)
    ec2_total_cost = 0
    # ec2 instance total cost if all are using on-demand
    ec2_all_on_demand_total_cost = 0
    conn = boto.ec2.connect_to_region(region)
    result = get_reserved_analysis(conn)
    columns = [
        'Instance type',
        'VPC',
        'Zone',
        'Tenancy',
        'Covered',
        'Instance ID',  # fixed user-facing typo: was 'Instnace ID'
        'Name',
        'Monthly Cost',
    ]
    table = PrettyTable(columns)
    for key in columns:
        table.align[key] = 'l'
    table.align['Monthly Cost'] = 'r'
    for (instance_type, vpc, zone, tenancy), instances in result['instance_items']:
        covered_count = 0
        for _, covered_price, _ in instances:
            if covered_price is not None:
                covered_count += 1
        # on demand cost per month
        od_unit_cost = decimal.Decimal(
            od_prices[instance_type]['valueColumns'][0]['prices']['USD']
        )
        # calculate total cost for the whole group: reserved price where
        # covered, on-demand price otherwise
        total_cost = 0
        for _, covered_price, _ in instances:
            unit_cost = od_unit_cost
            if covered_price is not None:
                unit_cost = decimal.Decimal(covered_price)
            total_cost += unit_cost * month_hours
        # group summary row, then one detail row per instance
        table.add_row([
            instance_type,
            vpc,
            zone,
            tenancy,
            '{} / {}'.format(covered_count, len(instances))
        ] + ([''] * 2) + [format_price(total_cost)])
        for instance_id, covered_price, name in instances:
            unit_cost = od_unit_cost
            if covered_price is not None:
                unit_cost = decimal.Decimal(covered_price)
            table.add_row(([''] * 4) + [
                covered_price is not None,
                instance_id,
                name,
                format_price(unit_cost * month_hours),
            ])
        ec2_total_cost += total_cost
        ec2_all_on_demand_total_cost += (
            od_unit_cost * month_hours * len(instances)
        )
    print(table)
    columns = [
        'Instance type',
        'VPC',
        'Zone',
        'Tenancy',
        'Count',
        'Monthly Cost',
    ]
    table = PrettyTable(columns)
    for key in columns:
        table.align[key] = 'l'
    table.align['Monthly Cost'] = 'r'
    # .items() instead of Python-2-only .iteritems(): works on both 2 and 3
    not_used_reserved = result['not_used_reserved_instances'].items()
    for (instance_type, vpc, zone, tenancy), instances in not_used_reserved:
        unit_cost = 0
        if instances:
            unit_cost = decimal.Decimal(
                instances[0].recurring_charges[0].amount
            )
        # unused reservations still cost their recurring charge every hour
        reserved_cost = len(instances) * unit_cost * month_hours
        table.add_row([
            instance_type,
            vpc,
            zone,
            tenancy,
            len(instances),
            format_price(reserved_cost),
        ])
        ec2_total_cost += reserved_cost
    print('#' * 10, 'Not in-use reserved instances', '#' * 10)
    print(table)
    if show_expirations:
        print('#' * 10, 'Imminent RI expirations', '#' * 10)
        columns = [
            'Instance type',
            'VPC',
            'Zone',
            'Tenancy',
            'Count',
            'Expiration',
        ]
        table = PrettyTable(columns)
        # .items() instead of Python-2-only .iteritems()
        all_reserved_groups = result['all_reserved_groups'].items()
        for (instance_type, vpc, zone, tenancy), instances in all_reserved_groups:
            # skip_rows collapses a reservation's per-instance records into a
            # single row carrying its instance_count
            skip_rows = 0
            for instance in instances:
                expiration = None
                if instance.state == "active" and skip_rows == 0:
                    d = datetime.datetime.strptime(instance.start, "%Y-%m-%dT%H:%M:%S.%fZ")
                    expiration = d + datetime.timedelta(seconds=instance.duration)
                    table.add_row([
                        instance_type,
                        vpc,
                        zone,
                        tenancy,
                        instance.instance_count,
                        expiration,
                    ])
                    skip_rows = instance.instance_count
                skip_rows = skip_rows - 1
        print(table.get_string(sortby='Expiration'))
    print('#' * 10, 'Summary', '#' * 10)
    print(
        'EC2 Monthly Costs:', format_price(ec2_total_cost)
    )
    print(
        'EC2 Monthly All On Demand Costs:',
        format_price(ec2_all_on_demand_total_cost)
    )
    print(
        'Amount you saved by using reserved:',
        format_price(ec2_all_on_demand_total_cost - ec2_total_cost)
    )
    # Guard against ZeroDivisionError when the region has no instances at all
    if ec2_all_on_demand_total_cost:
        print(
            'Percentage you saved by using reserved:',
            '% {:,.2f}'.format(
                ((ec2_all_on_demand_total_cost - ec2_total_cost) / ec2_all_on_demand_total_cost) * 100
            )
        )
3
Example 2
def parse(self, reader,filename=None):
    """Parse a ledger-style file from *reader*, line by line.

    Recognizes transaction headers with indented posts, plus the
    directives commodity/account/include/bucket/print/alias/closeall
    and the two "assert" checks. *filename* is carried through only
    for error reporting.
    """
    bucket = None          # default balancing account, set by the "bucket" directive
    transaction = None     # transaction currently being collected, if any
    accountdef = None      # "account" directive whose indented body is being skipped
    posts = []             # posts accumulated for the current transaction
    for linenum, line in enumerate(reader):
        linenum += 1       # report line numbers 1-based
        line = line.rstrip()
        # Skip blank lines and ";" comment lines.
        m = re.match(" *;", line)
        if line == '' or m:
            continue
        if transaction is not None:
            # Indented line inside a transaction: a post ("account  amount";
            # the amount, separated by 2+ spaces, is optional).
            m = re.match("^\s+(?P<account>.*?)(\s\s+(?P<amount>.*))?$", line)
            if m:
                amount = None
                if m.group("amount") is not None:
                    amount = self.parseamount(m.group("amount"),filename,linenum)
                post = Post(m.group("account"),amount,filename,linenum)
                posts.append(post)
                continue
            else:
                # Dedented line ends the transaction; commit what we have
                # before falling through to parse this line normally.
                try:
                    self.maketransaction(transaction, posts, bucket)
                except Exception as e:
                    # Re-raise with file/line context attached.
                    e.args = (ParseError(filename, linenum, "Parse error: %s" % e),)
                    raise
                posts = []
                transaction = None
        if accountdef is not None:
            # Ignore things under accountdef for now
            m = re.match("^\s+(.*)$",line)
            if m:
                continue
            else:
                accountdef = None
        # Transaction header: "date[=postdate] description".
        m = re.match("(?P<date>\d{4}-\d{2}-\d{2})(=(?P<postdate>\d{4}-\d{2}-\d{2}))?\s+(?P<description>.*)", line)
        if m:
            # The post date, when given, takes precedence over the entry date.
            if m.group("postdate") is not None:
                transaction = Transaction(m.group("postdate"),m.group("description"),filename,linenum)
            else:
                transaction = Transaction(m.group("date"),m.group("description"),filename,linenum)
            continue
        # "commodity" declarations carry no data we use yet.
        m = re.match("commodity\s+(?P<commodity>.*)", line)
        if m:
            continue
        m = re.match("account\s+(?P<account>.*)", line)
        if m:
            accountdef = m.groups()
            continue
        # "include FILE": recursively parse another ledger file.
        m = re.match("include\s+(?P<filename>.*)",line)
        if m:
            includefile = m.group("filename")
            with open(includefile) as f:
                self.parse(f,includefile)
            continue
        m = re.match("bucket\s+(?P<account>.*)",line)
        if m:
            bucket = m.group("account")
            continue
        m = re.match("print\s+(?P<str>.*)",line)
        if m:
            print m.group("str")
            continue
        m = re.match("alias\s+(?P<alias>.*?)\s+(?P<account>.*)",line)
        if m:
            self.aliases[m.group("alias")] = m.group("account")
            continue
        # "closeall ASOF PREFIX  CLOSINGACCOUNT": zero out every account under
        # PREFIX as of ASOF with one generated balancing transaction.
        m = re.match("closeall\s+(?P<asof>\d{4}-\d{2}-\d{2})\s+(?P<prefix>.+?)\s\s+(?P<closingaccount>.*)",line)
        if m:
            transaction = Transaction(m.group("asof"),"Automatic closing transaction",filename,linenum)
            posts = []
            closing = {}   # running per-commodity totals being closed out
            for account in self.accounts:
                if account.startswith(m.group("prefix")):
                    balance = self.balance(account,m.group("asof"))
                    for commodity,value in balance.items():
                        if commodity not in closing:
                            closing[commodity] = decimal.Decimal(0)
                        closing[commodity] += value
                        # Negate each balance so the account nets to zero.
                        posts.append(Post(account,Amount(commodity,-1*value),filename,linenum))
            self.maketransaction(transaction, posts, m.group("closingaccount"))
            transaction = None
            posts = None
            continue
        # "assert balance [ASOF] ACCOUNT  AMOUNT": check a recorded balance.
        m = re.match("assert\s+balance\s+(?P<asof>\d{4}-\d{2}-\d{2})?\s*(?P<account>.*?)\s\s+(?P<amount>.*)$",line)
        if m:
            if not self.assertions:
                continue
            try:
                balance = self.balance(m.group("account"),m.group("asof"))
            except AccountNotFoundError:
                # Not a leaf account; sum its children instead.
                balance = self.balance_children(m.group("account"),m.group("asof"))
            amount = self.parseamount(m.group("amount"),filename,linenum)
            # A zero assertion is satisfied when the commodity is absent.
            if not (amount.value == 0 and amount.commodity not in balance) and \
                (amount.commodity not in balance or balance[amount.commodity] != amount.value):
                raise AssertionError(filename, linenum, "Account %s actual balance of %s on %s does not match assertion value %s" % (m.group("account"),m.group("asof"), repr(balance), repr(amount)))
            continue
        # "assert equation [ASOF] A - L = E + I - X": check the accounting
        # equation across the five named account trees.
        m = re.match("assert\s+equation\s+(?P<asof>\d{4}-\d{2}-\d{2})?\s*(?P<assetsaccount>.*?)\s+-\s+(?P<liabilitiesaccount>.*?)\s+=\s+(?P<equityaccount>.*?)\s+\+\s+(?P<incomeaccount>.*?)\s+-\s+(?P<expenseaccount>.*?)$", line)
        if m:
            if not self.assertions:
                continue
            data = {}
            for acct in ["assets","liabilities","equity","income","expense"]:
                try:
                    balance = self.balance(m.group("%saccount" % acct),m.group("asof"))
                except AccountNotFoundError:
                    balance = self.balance_children(m.group("%saccount" % acct),m.group("asof"))
                data[acct] = balance
            # Assets + liabilities
            left = {}
            right = {}
            for commodity in self.commodities:
                left[commodity] = decimal.Decimal(0)
                right[commodity] = decimal.Decimal(0)
                # Left
                if commodity in data["assets"]:
                    left[commodity] += data["assets"][commodity]
                if commodity in data["liabilities"]:
                    left[commodity] += data["liabilities"][commodity]
                # Right
                # NOTE(review): equity/income/expense balances appear to be
                # stored sign-flipped, hence the subtraction — confirm
                # against self.balance()'s sign convention.
                if commodity in data["equity"]:
                    right[commodity] -= data["equity"][commodity]
                if commodity in data["income"]:
                    right[commodity] -= data["income"][commodity]
                if commodity in data["expense"]:
                    right[commodity] -= data["expense"][commodity]
            if left != right:
                print data
                raise AssertionError(filename, linenum, "Accounting equation not satisified: %s != %s" % (repr(left), repr(right)))
            continue
        # Nothing matched: the line is not valid ledger input.
        raise ParseError(filename, linenum, "Don't know how to parse \"%s\"" % line)
    # End of input: commit any transaction still being collected.
    if transaction is not None:
        self.maketransaction(transaction,posts,bucket)
3
Example 3
Project: coursys Source File: tests.py
def setUp(self):
    """Create the test fixture: a unit, a person, a semester, two courses
    with offerings, an account, a hiring semester, a TA category, one
    contract, and two TACourses attached to that contract.
    """
    today = datetime.date.today()
    unit = Unit(label="TEST", name="A Fake Unit for Testing")
    unit.save()
    person = Person(emplid="300000000", userid="testy",
                    first_name="Testy", last_name="Testerson")
    person.save()
    semester = Semester(name="1147", start=today, end=today)
    semester.save()
    course1 = Course(subject="TEST", number="100", title="Intro to Testing")
    course1.save()
    course2 = Course(subject="TEST", number="200", title="Advanced Testing")
    course2.save()
    offering1 = CourseOffering(subject="TEST", number="100", section="D100",
                               semester=semester, component="LEC", owner=unit,
                               title="Intro to Testing", campus="BRNBY",
                               enrl_cap=100, enrl_tot=100, wait_tot=50,
                               course=course1)
    offering1.save()
    offering2 = CourseOffering(subject="TEST", number="200", section="D200",
                               semester=semester, component="LEC", owner=unit,
                               title="Advanced Testing", campus="BRNBY",
                               enrl_cap=100, enrl_tot=100, wait_tot=50,
                               course=course2)
    offering2.save()
    account = Account(unit=unit, account_number=1337, position_number=5,
                      title="A Fake Account for Testing")
    account.save()
    # The hiring semester pays out over four weeks from today.
    hiring_semester = HiringSemester(semester=semester, unit=unit,
                                     deadline_for_acceptance=today,
                                     pay_start=today,
                                     pay_end=today + datetime.timedelta(days=28),
                                     payperiods=2.5)
    hiring_semester.save()
    category = TACategory(account=account,
                          hiring_semester=hiring_semester,
                          code="TST",
                          title="Test Contract Category",
                          hours_per_bu=decimal.Decimal("42"),
                          holiday_hours_per_bu=decimal.Decimal("1.1"),
                          pay_per_bu=decimal.Decimal("100.00"),
                          scholarship_per_bu=decimal.Decimal("25.00"),
                          bu_lab_bonus=decimal.Decimal("0.17"))
    category.save()
    contract = TAContract(category=category,
                          person=person,
                          status="NEW",
                          sin="123456789",
                          deadline_for_acceptance=today,
                          pay_start=today,
                          pay_end=today + datetime.timedelta(days=10),
                          payperiods=2.5,
                          appointment="INIT",
                          conditional_appointment=True,
                          created_by="classam",
                          tssu_appointment=True)
    contract.save()
    # One course with a lab/tutorial bonus, one without.
    tacourse = TACourse(course=offering1, contract=contract,
                        bu=decimal.Decimal('3.0'), labtut=True)
    tacourse.save()
    tacourse2 = TACourse(course=offering2, contract=contract,
                         bu=decimal.Decimal('2.0'), labtut=False)
    tacourse2.save()
3
Example 4
Project: coursys Source File: models.py
def activity_marks_from_JSON(activity, userid, data, save=False):
    """
    Build ActivityMark and ActivityComponentMark objects from imported JSON data.
    Since validating the input involves almost all of the work of saving the data, this function handles both. It is
    called once from is_valid with save==False to check everything, and again with save==True to actually do the work.
    Redundant yes, but it lets is_valid actually do its job without side effects.

    Returns the set of record identifiers (userids or group slugs) that were
    marked. Raises ValidationError on any malformed input.
    """
    # Top-level shape checks: {"marks": [...], "combine": optional bool}.
    if not isinstance(data, dict):
        raise ValidationError(u'Outer JSON data structure must be an object.')
    if 'marks' not in data:
        raise ValidationError(u'Outer JSON data object must contain key "marks".')
    if not isinstance(data['marks'], list):
        raise ValidationError(u'Value for "marks" must be a list.')
    # All the ActivityMark and ActivityComponentMark objects get built here:
    # we basically have to do this work to validate anyway.
    components = ActivityComponent.objects.filter(numeric_activity_id=activity.id, deleted=False)
    components = dict((ac.slug, ac) for ac in components)
    found = set()  # record ids seen so far, for duplicate detection
    combine = False # are we combining these marks with existing (as opposed to overwriting)?
    if 'combine' in data and bool(data['combine']):
        combine = True
    for markdata in data['marks']:
        if not isinstance(markdata, dict):
            raise ValidationError(u'Elements of array must be JSON objects.')
        # build the ActivityMark object and populate as much as possible for now.
        if activity.group and 'group' in markdata:
            # GroupActivityMark
            try:
                group = Group.objects.get(slug=markdata['group'], courseoffering=activity.offering)
            except Group.DoesNotExist:
                raise ValidationError(u'Group with id "%s" not found.' % (markdata['group']))
            am = GroupActivityMark(activity_id=activity.id, numeric_activity_id=activity.id, group=group, created_by=userid)
            recordid = markdata['group']
        elif 'userid' in markdata:
            # StudentActivityMark
            try:
                member = Member.objects.get(person__userid=markdata['userid'], offering=activity.offering, role="STUD")
            except Member.DoesNotExist:
                raise ValidationError(u'Userid %s not in course.' % (markdata['userid']))
            am = StudentActivityMark(activity_id=activity.id, created_by=userid)
            recordid = markdata['userid']
        else:
            raise ValidationError(u'Must specify "userid" or "group" for mark.')
        # check for duplicates in import
        if recordid in found:
            raise ValidationError(u'Duplicate marks for "%s".' % (recordid))
        found.add(recordid)
        if combine:
            # if we're being asked to combine with old marks, get the old one (if exists)
            try:
                if activity.group:
                    old_am = get_group_mark(activity, group)
                else:
                    old_am = get_activity_mark_for_student(activity, member)
            except NumericGrade.DoesNotExist:
                old_am = None
        acms = [] # ActivityComponentMarks we will create for am
        # build ActivityComponentMarks
        found_comp_slugs = set()
        mark_total = 0
        # Per-record adjustments, overridden below by special-case keys
        # and/or inherited from the old mark when combining.
        late_percent = decimal.Decimal(0)
        mark_penalty = decimal.Decimal(0)
        mark_penalty_reason = ""
        overall_comment = ""
        file_filename = None
        file_data = None
        file_mediatype = None
        # Added for the special case where we have a numeric mark only, without components. This can happen when
        # using the "mark for all groups/users" form.
        the_mark = decimal.Decimal(0)
        if combine and old_am:
            late_percent = old_am.late_penalty
            mark_penalty = old_am.mark_adjustment
            mark_penalty_reason = old_am.mark_adjustment_reason
            overall_comment = old_am.overall_comment
        for slug in markdata:
            # handle special-case slugs (that don't represent MarkComponents)
            if slug in ['userid', 'group']:
                continue
            elif slug == 'the_mark':
                the_mark = decimal.Decimal(str(markdata[slug]))
                continue
            elif slug=="late_percent":
                try:
                    late_percent = decimal.Decimal(str(markdata[slug]))
                except decimal.InvalidOperation:
                    raise ValidationError(u'Value for "late_percent" must be numeric in record for "%s".' % (recordid))
                continue
            elif slug=="mark_penalty":
                try:
                    mark_penalty = decimal.Decimal(str(markdata[slug]))
                except decimal.InvalidOperation:
                    raise ValidationError(u'Value for "mark_penalty" must be numeric in record for "%s".' % (recordid))
                continue
            elif slug=="mark_penalty_reason":
                mark_penalty_reason = unicode(markdata[slug])
                continue
            elif slug=="overall_comment":
                overall_comment = unicode(markdata[slug])
                continue
            elif slug=="attach_type":
                file_mediatype = str(markdata[slug])
                continue
            elif slug=="attach_filename":
                file_filename = unicode(markdata[slug])
                continue
            elif slug=="attach_data":
                try:
                    file_data = base64.b64decode(markdata[slug])
                except TypeError:
                    raise ValidationError('Invalid base64 file data for "%s"' % (recordid))
                continue
            # handle MarkComponents
            if slug in components and slug not in found_comp_slugs:
                comp = components[slug]
                found_comp_slugs.add(slug)
            elif slug in components:
                # shouldn't happen because JSON lib forces unique keys, but let's be extra safe...
                raise ValidationError(u'Multiple values given for "%s" in record for "%s".' % (slug, recordid))
            else:
                raise ValidationError(u'Mark component "%s" not found in record for "%s".' % (slug, recordid))
            cm = ActivityComponentMark(activity_component=comp)
            acms.append(cm) # can't set activity_mark yet since it doesn't have an id
            componentdata = markdata[slug]
            if not isinstance(componentdata, dict):
                raise ValidationError(u'Mark component data must be JSON object (in "%s" for "%s").' % (slug, recordid))
            if 'mark' not in componentdata:
                raise ValidationError(u'Must give "mark" for "%s" in record for "%s".' % (comp.title, recordid))
            try:
                value = decimal.Decimal(str(componentdata['mark']))
            except decimal.InvalidOperation:
                raise ValidationError(u'Value for "mark" must be numeric for "%s" in record for "%s".' % (comp.title, recordid))
            cm.value = value
            mark_total += float(componentdata['mark'])
            if 'comment' in componentdata and save:
                cm.comment = unicode(componentdata['comment'])
            if 'display_raw' in componentdata and save:
                cm.set_display_raw(bool(componentdata['display_raw']))
        # In the case of combined gradings, we have to get the value from old components to add to it.
        if combine:
            for slug in set(components.keys()) - found_comp_slugs:
                # handle missing components
                cm = ActivityComponentMark(activity_component=components[slug])
                acms.append(cm) # can't set activity_mark yet since it doesn't have an id
                if old_am:
                    # carry forward the old component mark unchanged
                    old_cm = ActivityComponentMark.objects.get(activity_mark=old_am, activity_component=components[slug])
                    mark_total += float(old_cm.value)
                    cm.value = old_cm.value
                    cm.comment = old_cm.comment
                    cm.set_display_raw(old_cm.display_raw())
        # handle file attachment
        if file_filename or file_data or file_mediatype:
            # new attachment
            if not (file_filename and file_data and file_mediatype):
                raise ValidationError(u'Must specify all or none of "attach_type", "attach_filename", "attach_data" in record for "%s"' % (recordid))
            am.file_mediatype = file_mediatype
            if save:
                am.file_attachment.save(name=file_filename, content=ContentFile(file_data), save=False)
        elif combine and old_am:
            # recycle old
            am.file_attachment = old_am.file_attachment
            am.file_mediatype = old_am.file_mediatype
        else:
            # none
            am.file_attachment = None
            am.file_mediatype = None
        am.late_penalty = late_percent
        am.mark_adjustment = mark_penalty
        am.mark_adjustment_reason = mark_penalty_reason
        am.overall_comment = overall_comment
        # An explicit "the_mark" wins; otherwise apply the penalty and the
        # late percentage to the component total.
        mark_total = the_mark or ((1-late_percent/decimal.Decimal(100)) *
                (decimal.Decimal(str(mark_total)) - mark_penalty))
        # put the total mark and numeric grade objects in place
        am.mark = mark_total
        value = mark_total
        if isinstance(am, StudentActivityMark):
            # one grade per student: reuse an existing NumericGrade if present
            grades = NumericGrade.objects.filter(activity_id=activity.id, member=member)
            if grades:
                numeric_grade = grades[0]
                numeric_grade.flag = "GRAD"
            else:
                numeric_grade = NumericGrade(activity_id=activity.id, member=member, flag="GRAD")
            numeric_grade.value = value
            if save:
                numeric_grade.save(entered_by=userid)
            am.numeric_grade = numeric_grade
        else:
            # group mark: propagate the grade to every confirmed group member
            group_members = GroupMember.objects.filter(group=group, activity_id=activity.id, confirmed=True)
            for g_member in group_members:
                try:
                    ngrade = NumericGrade.objects.get(activity_id=activity.id, member=g_member.student)
                except NumericGrade.DoesNotExist:
                    ngrade = NumericGrade(activity_id=activity.id, member=g_member.student)
                ngrade.value = value
                ngrade.flag = 'GRAD'
                if save:
                    ngrade.save(entered_by=userid)
        if save:
            am.save()
            # now that am has an id, attach and save its component marks
            for cm in acms:
                cm.activity_mark = am
                cm.save()
    return found
3
Example 5
Project: edx-platform Source File: views.py
@require_config(DonationConfiguration)
@require_POST
@login_required
def donate(request):
    """Add a single donation item to the cart and proceed to payment.

    Warning: clears all items already in the user's cart before adding
    the new donation item.

    Arguments:
        request (Request): Django request whose POST data carries "amount"
            (string, required) and "course_id" (slash-separated course ID
            string, optional).

    Returns:
        HttpResponse: 200 with a JSON body containing "payment_url"
            (string) and "payment_params" (dict) for the client to POST
            to the payment processor.
        HttpResponse: 400 for an invalid amount or course ID.
        HttpResponse: 404 when donations are disabled.
        HttpResponse: 405 for a non-POST request.

    Example usage:
        POST /shoppingcart/donation/
        with params {'amount': '12.34', course_id': 'edX/DemoX/Demo_Course'}
        will respond with the signed purchase params
        that the client can send to the payment processor.
    """
    raw_amount = request.POST.get('amount')
    course_id = request.POST.get('course_id')
    # "amount" is mandatory; reject early if missing.
    if raw_amount is None:
        msg = u"Request is missing required param 'amount'"
        log.error(msg)
        return HttpResponseBadRequest(msg)
    # Normalize to cents, rounding down.
    try:
        amount = decimal.Decimal(raw_amount).quantize(
            decimal.Decimal('.01'),
            rounding=decimal.ROUND_DOWN,
        )
    except decimal.InvalidOperation:
        return HttpResponseBadRequest("Could not parse 'amount' as a decimal")
    # Any amount is okay as long as it's greater than 0; having quantized
    # down to cents, "greater than 0" means "at least one cent".
    if amount < decimal.Decimal('0.01'):
        return HttpResponseBadRequest("Amount must be greater than 0")
    if course_id is not None:
        try:
            course_id = CourseLocator.from_string(course_id)
        except InvalidKeyError:
            msg = u"Request included an invalid course key: {course_key}".format(course_key=course_id)
            log.error(msg)
            return HttpResponseBadRequest(msg)
    # Replace whatever was in the cart with this single donation.
    cart = Order.get_cart_for_user(request.user)
    cart.clear()
    try:
        # Course ID may be None if this is a donation to the entire organization
        Donation.add_to_order(cart, amount, course_id=course_id)
    except InvalidCartItem as ex:
        log.exception(
            u"Could not create donation item for amount '%s' and course ID '%s'",
            amount,
            course_id
        )
        return HttpResponseBadRequest(unicode(ex))
    # Lock the purchase so the user can't change the amount after we send
    # the information to the payment processor; another donation attempt
    # will start a fresh cart.
    cart.start_purchase()
    callback_url = request.build_absolute_uri(
        reverse("shoppingcart.views.postpay_callback")
    )
    # Extra data makes transactions easier to track downstream.
    if course_id:
        extra_data = [unicode(course_id), "donation_course"]
    else:
        extra_data = ["", "donation_general"]
    payment_params = get_signed_purchase_params(
        cart,
        callback_url=callback_url,
        extra_data=extra_data,
    )
    response_params = json.dumps({
        # The HTTP end-point for the payment processor.
        "payment_url": get_purchase_endpoint(),
        # Parameters the client should send to the payment processor
        "payment_params": payment_params,
    })
    return HttpResponse(response_params, content_type="text/json")
2
Example 6
Project: ANALYSE Source File: views.py
@require_POST
@login_required
def create_order(request):
    """
    Submit PhotoVerification and create a new Order for this verified cert
    """
    # Only collect new verification photos if the user has no valid or
    # pending verification on file.
    if not SoftwareSecurePhotoVerification.user_has_valid_or_pending(request.user):
        attempt = SoftwareSecurePhotoVerification(user=request.user)
        try:
            # Images arrive as data URLs; the payload follows the comma.
            b64_face_image = request.POST['face_image'].split(",")[1]
            b64_photo_id_image = request.POST['photo_id_image'].split(",")[1]
        except IndexError:
            return JsonResponse({'success': False})
        attempt.upload_face_image(b64_face_image.decode('base64'))
        attempt.upload_photo_id_image(b64_photo_id_image.decode('base64'))
        attempt.mark_ready()
        attempt.save()
    course_id = CourseKey.from_string(request.POST['course_id'])
    course_key_str = unicode(course_id)
    # Remember the user's chosen contribution per course in the session.
    donation_for_course = request.session.get('donation_for_course', {})
    current_donation = donation_for_course.get(course_key_str, decimal.Decimal(0))
    contribution = request.POST.get("contribution", donation_for_course.get(course_key_str, 0))
    # Normalize the contribution to cents, rounding down.
    try:
        amount = decimal.Decimal(contribution).quantize(decimal.Decimal('.01'), rounding=decimal.ROUND_DOWN)
    except decimal.InvalidOperation:
        return HttpResponseBadRequest(_("Selected price is not valid number."))
    if amount != current_donation:
        donation_for_course[course_key_str] = amount
        request.session['donation_for_course'] = donation_for_course
    # prefer professional mode over verified_mode
    current_mode = CourseMode.verified_mode_for_course(course_id)
    # make sure this course has a verified mode
    if not current_mode:
        return HttpResponseBadRequest(_("This course doesn't support verified certificates"))
    # Professional courses have a fixed price; otherwise enforce the minimum.
    if current_mode.slug == 'professional':
        amount = current_mode.min_price
    if amount < current_mode.min_price:
        return HttpResponseBadRequest(_("No selected price or selected price is below minimum."))
    # I know, we should check this is valid. All kinds of stuff missing here
    cart = Order.get_cart_for_user(request.user)
    cart.clear()
    CertificateItem.add_to_order(cart, course_id, amount, current_mode.slug)
    # Change the order's status so that we don't accidentally modify it later:
    # the parameters sent to the payment system must match what we store in the
    # database. (Ordinarily done client-side on form submit, but the JavaScript
    # on this page submits immediately, so locking here avoids a second AJAX
    # call. A user re-entering the flow later creates a new order.)
    cart.start_purchase()
    callback_url = request.build_absolute_uri(
        reverse("shoppingcart.views.postpay_callback")
    )
    params = get_signed_purchase_params(
        cart,
        callback_url=callback_url,
        extra_data=[course_key_str, current_mode.slug]
    )
    params['success'] = True
    return HttpResponse(json.dumps(params), content_type="text/json")
2
Example 7
Project: nuodb-python Source File: nuodb_basic_test.py
def test_all_types(self):
    """Round-trip one row containing every supported column type and check
    each value (or its field-by-field equivalent) survives intact."""
    connection = self._connect()
    cur = connection.cursor()
    cur.execute("drop table typetest if exists")
    try:
        cur.execute("create table typetest (id integer GENERATED ALWAYS AS IDENTITY, binary_col binary(10), " +
                    "bool_col boolean, timestamp_col timestamp, time_col time, date_col date, string_col string, " +
                    "varchar_col varchar(10), char_col char(10), smallint_col smallint, integer_col integer, bigint_col bigint, " +
                    "numeric_col numeric(10, 2), decimal_col decimal(10, 2), number_col number, double_col double, clob_col clob, blob_col blob)")
        # One value per non-identity column, in declaration order.
        expected = (
            pynuodb.Binary("binary"),
            False,
            pynuodb.Timestamp(1990, 12, 31, 19, 0, 0),
            pynuodb.Time(10, 30, 44),
            pynuodb.Date(1998, 1, 1),
            "this",
            "is a",
            "test",
            -13546,
            156465465,
            -3135135132132104354,
            decimal.Decimal('-354564.12'),
            decimal.Decimal('77788864.6'),
            decimal.Decimal('-46543213.01324654'),
            -999.999999,
            "The test",
            pynuodb.Binary("test"),
        )
        cur.execute("insert into typetest (binary_col, bool_col, timestamp_col, time_col, date_col, string_col, " +
                    "varchar_col, char_col, smallint_col, integer_col, bigint_col, numeric_col, decimal_col, " +
                    "number_col, double_col, clob_col, blob_col) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", expected)
        connection.commit()
        cur.execute("select * from typetest order by id desc limit 1")
        row = cur.fetchone()
        # Columns 1-2 (binary, boolean) compare directly; column 0 is the id.
        for col in (1, 2):
            self.assertEqual(row[col], expected[col - 1])
        # Temporal columns come back as driver types; compare field by field.
        self.assertIsInstance(row[3], pynuodb.Timestamp)
        self.assertIsInstance(row[4], pynuodb.Time)
        self.assertIsInstance(row[5], pynuodb.Date)
        for attr in ("year", "month", "day", "hour", "minute", "second", "microsecond"):
            self.assertEqual(getattr(row[3], attr), getattr(expected[2], attr))
        for attr in ("hour", "minute", "second", "microsecond"):
            self.assertEqual(getattr(row[4], attr), getattr(expected[3], attr))
        for attr in ("year", "month", "day"):
            self.assertEqual(getattr(row[5], attr), getattr(expected[4], attr))
        # All remaining columns compare directly.
        for col in range(6, len(row)):
            self.assertEqual(row[col], expected[col - 1])
    finally:
        # Always drop the table and close the connection, even on failure.
        try:
            cur.execute("drop table typetest if exists")
        finally:
            connection.close()
2
Example 8
Project: pyorient Source File: test_ogm.py
def testMoney(self):
    """Exercise Decimal vs Float wallet properties, Gremlin scripting,
    query slicing, edge traversal and dynamic schema rebuilding over the
    money test graph."""
    assert len(MoneyNode.registry) == 2
    assert len(MoneyRelationship.registry) == 1
    g = self.g
    if g.server_version.major == 1:
        self.skipTest(
            'UUID method does not exists in OrientDB version < 2')
    costanzo = g.people.create(full_name='Costanzo Veronesi', uuid=UUID())
    valerius = g.people.create(full_name='Valerius Burgstaller'
                               , uuid=UUID())
    if g.server_version >= (2,1,0):
        # Default values supported
        oliver = g.people.create(full_name='Oliver Girard')
    else:
        oliver = g.people.create(full_name='Oliver Girard', uuid=UUID())
    # If you override nullable properties to be not-mandatory, be aware that
    # OrientDB version < 2.1.0 does not count null
    assert Person.objects.query().what(distinct(Person.uuid)).count() == 3
    # The Decimal-backed property must keep every digit; the float-backed
    # one loses precision, so the two must differ after a round trip.
    original_inheritance = decimal.Decimal('1520841.74309871919')
    inheritance = g.wallets.create(
        amount_precise = original_inheritance
        , amount_imprecise = original_inheritance)
    assert inheritance.amount_precise == original_inheritance
    assert inheritance.amount_precise != inheritance.amount_imprecise
    pittance = decimal.Decimal('0.1')
    poor_pouch = g.wallets.create(
        amount_precise=pittance
        , amount_imprecise=pittance)
    assert poor_pouch.amount_precise == pittance
    assert poor_pouch.amount_precise != poor_pouch.amount_imprecise
    # Django-style creation
    costanzo_claim = Carries.objects.create(costanzo, inheritance)
    valerius_claim = Carries.objects.create(valerius, inheritance)
    oliver_carries = Carries.objects.create(oliver, poor_pouch)
    # Load the Groovy script bundle next to this file and run the
    # server-side "rich_list" query against it.
    g.scripts.add(GroovyScripts.from_file(
        os.path.join(
            os.path.split(
                os.path.abspath(__file__))[0], 'money.groovy')), 'money')
    rich_list = g.gremlin('rich_list', 1000000, namespace='money')
    assert costanzo in rich_list and valerius in rich_list \
        and oliver not in rich_list
    bigwallet_query = g.query(Wallet).filter(Wallet.amount_precise > 100000)
    smallerwallet_query = g.query(Wallet).filter(
        Wallet.amount_precise < 100000)
    # Basic query slicing
    assert len(bigwallet_query[:]) == 1
    assert len(smallerwallet_query) == 1
    assert bigwallet_query.first() == inheritance
    pouch = smallerwallet_query[0]
    assert pouch == poor_pouch
    # Edge-traversal helpers should agree with each other.
    assert len(pouch.outE()) == len(pouch.out())
    assert pouch.in_() == pouch.both() and pouch.inE() == pouch.bothE()
    first_inE = pouch.inE()[0]
    assert first_inE == oliver_carries
    assert first_inE.outV() == oliver and first_inE.inV() == poor_pouch
    # Only the two wallets created above should exist at this point.
    for i, wallet in enumerate(g.query(Wallet)):
        print(decimal.Decimal(wallet.amount_imprecise) -
              wallet.amount_precise)
        assert i < 2
    # Rebuild Python-side classes from the server schema.
    schema_registry = g.build_mapping(MoneyNode, MoneyRelationship)
    assert all(c in schema_registry for c in ['person', 'wallet', 'carries'])
    WalletType = schema_registry['wallet']
    # Original property name, amount_precise, lost-in-translation
    assert type(WalletType.amount) == Decimal
    assert type(WalletType.amount_imprecise) == Float
    g.include(schema_registry)
    debt = decimal.Decimal(-42.0)
    WalletType.objects.create(amount=debt, amount_imprecise=0)
    assert g.query(Wallet)[2].amount == -42
2
Example 9
Project: ANALYSE Source File: views.py
@require_config(DonationConfiguration)
@require_POST
@login_required
def donate(request):
    """Add a single donation item to the cart and start the payment flow.

    Warning: any items already in the user's cart are cleared before the
    donation item is added.

    Arguments:
        request (Request): Django request whose POST data carries "amount"
            (string, required) and optionally "course_id" (slash-separated
            course ID string).

    Returns:
        HttpResponse: 200 with a JSON body containing "payment_url" (string)
            and "payment_params" (dictionary) for the client to POST to the
            payment processor.
        HttpResponse: 400 when the amount or course ID is invalid.
        HttpResponse: 404 when donations are disabled.
        HttpResponse: 405 for a non-POST request.

    Example usage:
        POST /shoppingcart/donation/ with
        {'amount': '12.34', course_id': 'edX/DemoX/Demo_Course'}
        responds with the signed purchase params for the payment processor.
    """
    raw_amount = request.POST.get('amount')
    course_id = request.POST.get('course_id')

    # "amount" is mandatory; reject the request early when it is absent.
    if raw_amount is None:
        msg = u"Request is missing required param 'amount'"
        log.error(msg)
        return HttpResponseBadRequest(msg)

    # Normalize to cent granularity, always rounding down.
    try:
        amount = decimal.Decimal(raw_amount).quantize(
            decimal.Decimal('.01'), rounding=decimal.ROUND_DOWN)
    except decimal.InvalidOperation:
        return HttpResponseBadRequest("Could not parse 'amount' as a decimal")

    # Having rounded down to 0.01 granularity, anything below one cent
    # means the original amount was not a positive donation.
    if amount < decimal.Decimal('0.01'):
        return HttpResponseBadRequest("Amount must be greater than 0")

    # A donation may optionally target a specific course.
    if course_id is not None:
        try:
            course_id = CourseLocator.from_string(course_id)
        except InvalidKeyError:
            msg = u"Request included an invalid course key: {course_key}".format(course_key=course_id)
            log.error(msg)
            return HttpResponseBadRequest(msg)

    # Replace whatever is currently in the cart with the donation item.
    cart = Order.get_cart_for_user(request.user)
    cart.clear()
    try:
        # course_id is None for a donation to the entire organization.
        Donation.add_to_order(cart, amount, course_id=course_id)
    except InvalidCartItem as ex:
        log.exception((
            u"Could not create donation item for "
            u"amount '{amount}' and course ID '{course_id}'"
        ).format(amount=amount, course_id=course_id))
        return HttpResponseBadRequest(unicode(ex))

    # "Lock" the purchase so the amount cannot change after we hand it to
    # the payment processor; a later donation lands in a fresh cart.
    cart.start_purchase()

    callback_url = request.build_absolute_uri(
        reverse("shoppingcart.views.postpay_callback")
    )
    # Extra data makes transactions easier to trace on the processor side.
    extra_data = [
        unicode(course_id) if course_id else "",
        "donation_course" if course_id else "donation_general"
    ]
    payload = {
        # The HTTP end-point for the payment processor.
        "payment_url": get_purchase_endpoint(),
        # Parameters the client should send to the payment processor
        "payment_params": get_signed_purchase_params(
            cart,
            callback_url=callback_url,
            extra_data=extra_data
        ),
    }
    return HttpResponse(json.dumps(payload), content_type="text/json")
2
Example 10
Project: bowshock Source File: earth.py
def imagery(lon=None, lat=None, dim=None, date=None, cloud_score=None):
    '''Build and dispatch a request to NASA's Landsat imagery endpoint.

    # ----------QUERY PARAMETERS----------
    # Parameter Type Default Description
    # lat float n/a Latitude
    # lon float n/a Longitude
    # dim float 0.025 width and height of image in degrees
    # date YYYY-MM-DD today date of image ----if not supplied, then the most recent image (i.e., closest to today) is returned
    #cloud_score bool False calculate the percentage of the image covered by clouds
    #api_key string vDEMO_KEY api.data.gov key for expanded usage
    # ---------EXAMPLE QUERY--------
    # https://api.data.gov/nasa/planetary/earth/imagery?lon=100.75&lat=1.5&date=2014-02-01&cloud_score=True&api_key=DEMO_KEY
    '''
    base_url = "http://api.data.gov/nasa/planetary/earth/imagery?"
    # BUGFIX: test for None explicitly -- the original `not lon or not lat`
    # wrongly rejected the perfectly valid coordinate 0.0 (the equator /
    # the Greenwich meridian), since 0.0 is falsy.
    if lon is None or lat is None:
        raise ValueError(
            "imagery endpoint expects lat and lon, type has to be float. Call the method with keyword args. Ex : lon=100.75, lat=1.5")
    try:
        validate_float(lon, lat)
        # Floats are stored in binary, so their repr() can carry artifacts
        # (e.g. repr(0.1) == '0.10000000000000001' on older Pythons).
        # Routing through Decimal yields a clean string for the URL.
        lon = decimal.Decimal(lon)
        lat = decimal.Decimal(lat)
        base_url += "lon=" + str(lon) + "&" + "lat=" + str(lat) + "&"
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate instead of being rewritten as a ValueError.
        raise ValueError(
            "imagery endpoint expects lat and lon, type has to be float. Call the method with keyword args. Ex : lon=100.75, lat=1.5")
    if dim:
        try:
            validate_float(dim)
            dim = decimal.Decimal(dim)
            base_url += "dim=" + str(dim) + "&"
        except Exception:
            raise ValueError("imagery endpoint expects dim to be a float")
    if date:
        try:
            vali_date(date)
            base_url += "date=" + date + "&"
        except Exception:
            raise ValueError("Incorrect date format, should be YYYY-MM-DD")
    if cloud_score == True:
        base_url += "cloud_score=True" + "&"
    req_url = base_url + "api_key=" + nasa_api_key()
    return dispatch_http_get(req_url)
2
Example 11
Project: billy Source File: __init__.py
@never_cache
@is_superuser
def bills(request, abbr):
    """Render the superuser bill-quality report for one jurisdiction.

    Builds one HTML table per entry in ``tablespecs`` (counts, types,
    actions, quality issues), with one column per legislative session plus
    a trailing "Total" column, then renders them via billy templates.
    """
    meta, report = _meta_and_report(abbr)
    # Flatten the per-term session lists into one ordered list of sessions.
    terms = list(chain.from_iterable(map(itemgetter('sessions'),
                                         meta['terms'])))

    def sorter(item, index=terms.index, len_=len(terms)):
        '''Sort session strings in order described in metadata.'''
        session, data = item
        return index(session)

    # Convert sessions into an ordered dict.
    sessions = report['bills']['sessions']
    sessions = sorted(sessions.items(), key=sorter)
    sessions = OrderedDict(sessions)

    def decimal_format(value, TWOPLACES=decimal.Decimal(100) ** -1):
        '''Format a float like 2.2345123 as a decimal like 2.23'''
        n = decimal.Decimal(str(value))
        n = n.quantize(TWOPLACES)
        return unicode(n)

    # Define data for the tables for counts, types, etc.
    tablespecs = [
        ('Bill Counts', {'rownames': ['upper_count', 'lower_count',
                                      'version_count']}),
        ('Bill Types', {
            'keypath': ['bill_types'], 'summary': {
                'object_type': 'bills', 'key': 'type',
            },
        }),
        ('Actions by Type', {
            'keypath': ['actions_per_type'], 'summary': {
                'object_type': 'actions',
                'key': 'type',
            },
        }),
        ('Actions by Actor', {
            'keypath': ['actions_per_actor'], 'summary': {
                'object_type': 'actions',
                'key': 'actor',
            },
        }),
        ('Quality Issues', {'rownames': [
            'sponsorless_count', 'actionless_count', 'actions_unsorted',
            'bad_vote_counts', 'version_count', 'versionless_count',
            'sponsors_with_id', 'rollcalls_with_leg_id', 'have_subjects',
            'updated_this_year', 'updated_this_month', 'updated_today',
            'vote_passed']}),
    ]
    # Row names whose values are rendered as percentages.
    format_as_percent = [
        'sponsors_with_id',
        'rollcalls_with_leg_id',
        'have_subjects',
        'updated_this_year',
        'updated_this_month',
        'updated_today',
        'actions_per_actor',
        'actions_per_type']
    # Create the data for each table.
    tables = []
    for name, spec in tablespecs:
        column_names = []
        rows = defaultdict(list)
        href_params = {}
        tabledata = {'abbr': abbr,
                     'title': name,
                     'column_names': column_names,
                     'rows': rows}
        contexts = []
        # One column per session; drill into the spec's keypath if given.
        for session, context in sessions.items():
            column_names.append(session)
            if 'keypath' in spec:
                for k in spec['keypath']:
                    context = context[k]
            contexts.append(context)
        try:
            rownames = spec['rownames']
        except KeyError:
            # No explicit row names: use the union of keys across sessions.
            rownames = reduce(lambda x, y: set(x) | set(y), contexts)
        for context in contexts:
            for r in rownames:
                val = context.get(r, 0)
                # Collections are displayed by their size.
                if not isinstance(val, (int, float, decimal.Decimal)):
                    val = len(val)
                use_percent = any([
                    r in format_as_percent,
                    name in ['Actions by Actor', 'Actions by Type'],
                ])
                if use_percent and (val != 0):
                    val = decimal_format(val)
                    val += ' %'
                rows[r].append(val)
                # Link to summary/distinct views.
                if 'summary' in spec:
                    try:
                        spec_val = spec['spec'](r)
                    except KeyError:
                        spec_val = r
                    else:
                        spec_val = json.dumps(spec_val, cls=JSONEncoderPlus)
                    params = dict(spec['summary'], session=session,
                                  val=spec_val)
                    params = urllib.urlencode(params)
                    href_params[r] = params
        # Add the final "total" column.
        tabledata['column_names'].append('Total')
        for k, v in rows.items():
            try:
                sum_ = sum(v)
            except TypeError:
                # Mixed types (e.g. percent strings) cannot be summed.
                sum_ = 'n/a'
            v.append(sum_)
        rowdata = [((r, href_params.get(r)), cells)
                   for (r, cells) in rows.items()]
        tabledata['rowdata'] = rowdata
        tables.append(tabledata)
    # ------------------------------------------------------------------------
    # Render the tables.
    _render = functools.partial(render_to_string, 'billy/bills_table.html')
    tables = map(_render, tables)
    return render(request, "billy/bills.html",
                  dict(tables=tables, metadata=meta, sessions=sessions,
                       tablespecs=tablespecs))
2
Example 12
Project: dj-stripe Source File: test_managers.py
def setUp(self):
    """Create one customer with charge fixtures spanning 2014-2016.

    The April 2015 charges carry amounts, fees and refunds so that
    manager-level aggregation tests have data to sum; the remaining
    charges only pin down the boundaries of the date ranges under test.
    """
    customer = Customer.objects.create(stripe_id="cus_XXXXXXX")

    def charge(stripe_id, created, **extra):
        # Every fixture belongs to the same customer.
        return Charge.objects.create(
            stripe_id=stripe_id,
            customer=customer,
            charge_created=created,
            **extra
        )

    self.march_charge = charge("ch_XXXXMAR1",
                               datetime.datetime(2015, 3, 31))
    self.april_charge_1 = charge("ch_XXXXAPR1",
                                 datetime.datetime(2015, 4, 1),
                                 paid=True,
                                 amount=decimal.Decimal("20.15"),
                                 fee=decimal.Decimal("4.90"))
    self.april_charge_2 = charge("ch_XXXXAPR2",
                                 datetime.datetime(2015, 4, 18),
                                 paid=True,
                                 amount=decimal.Decimal("10.35"),
                                 amount_refunded=decimal.Decimal("5.35"))
    self.april_charge_3 = charge("ch_XXXXAPR3",
                                 datetime.datetime(2015, 4, 30),
                                 paid=False,
                                 amount=decimal.Decimal("100.00"),
                                 amount_refunded=decimal.Decimal("80.00"),
                                 fee=decimal.Decimal("5.00"))
    self.may_charge = charge("ch_XXXXMAY1",
                             datetime.datetime(2015, 5, 1))
    self.november_charge = charge("ch_XXXXNOV1",
                                  datetime.datetime(2015, 11, 16))
    self.charge_2014 = charge("ch_XXXX20141",
                              datetime.datetime(2014, 12, 31))
    self.charge_2016 = charge("ch_XXXX20161",
                              datetime.datetime(2016, 1, 1))
2
Example 13
Project: osmc Source File: update_service.py
def position_icon(self):
    '''Sets the position and size of the update icon.

    The artwork is 175x75 px at 1080p; both the size and the position are
    derived from the current skin resolution so the icon scales with it.
    '''
    self.try_image_position_again = False

    def to_px(value):
        # Integer pixel count via Decimal rounding, matching the
        # original int(round(Decimal(...), 0)) idiom.
        return int(round(decimal.Decimal(value), 0))

    # User-configured position, as fractions of the screen.
    frac_x = self.s['pos_x'] / 100.0
    frac_y = self.s['pos_y'] / 100.0
    width = 175  # as % of 1920: 0.0911458333333
    height = 75  # as % of 1080: 0.0694444444444
    width_pct = 0.0911458333333
    height_pct = 0.0694444444444
    # Skin dimensions are published by ubiquifonts on the Home window.
    try:
        screen_height = self.window.getProperty("SkinHeight")
        screen_width = self.window.getProperty("SkinWidth")
    except:
        screen_height = 1080
        screen_width = 1920
    if screen_height == '':
        # Property not populated yet; retry up to 50 times before
        # falling back to 1080p defaults.
        if self.try_count >= 50:
            self.try_count = 0
            screen_height = 1080
            screen_width = 1920
        else:
            self.try_image_position_again = True
            self.try_count += 1
            return
    # If only the width is blank (very rare, freak timing), estimate it
    # from the height assuming a 16:9 ratio.
    if screen_width == '':
        screen_width = screen_height * 1.7777777777
    screen_height = int(screen_height)
    screen_width = int(screen_width)
    # New image dimensions, scaled to the skin resolution.
    img_height = to_px(screen_height * height_pct)
    img_width = to_px(screen_width * width_pct)
    # New top-left coordinates of the image.
    adj_height = screen_height - img_height
    adj_width = screen_width - img_width
    pos_top = to_px(adj_height * frac_y)
    pos_left = to_px(adj_width * frac_x)
    for entry in ('=============================',
                  screen_height, screen_width,
                  adj_height, adj_width,
                  img_height, img_width,
                  pos_top, pos_left,
                  '============================='):
        log(entry)
    # Reposition, then rescale, the image control.
    self.update_image.setPosition(pos_left, pos_top)
    self.update_image.setWidth(img_width)
    self.update_image.setHeight(img_height)
0
Example 14
Project: shuup Source File: test_attributes.py
def _populate_applied_attribute(aa):
    # Assign a type-appropriate value to the applied attribute `aa` and
    # assert round-trip behavior: the parsed `value` and its canonical
    # `untranslated_string_value` form after save().  Exactly one branch
    # runs per call; each returns after its assertions.
    if aa.attribute.type == AttributeType.BOOLEAN:
        aa.value = True
        aa.save()
        assert aa.value is True, "Truth works"
        assert aa.untranslated_string_value == "1", "Integer attributes save string representations"
        aa.value = not 42  # (but it could be something else)
        aa.save()
        assert aa.value is False, "Lies work"
        assert aa.untranslated_string_value == "0", "Integer attributes save string representations"
        return
    if aa.attribute.type == AttributeType.INTEGER:
        # Floats assigned to integer attributes are truncated on save.
        aa.value = 320.51
        aa.save()
        assert aa.value == 320, "Integer attributes get rounded down"
        assert aa.untranslated_string_value == "320", "Integer attributes save string representations"
        return
    if aa.attribute.type == AttributeType.DECIMAL:
        aa.value = Decimal("0.636")  # Surface pressure of Mars
        aa.save()
        assert aa.value * 1000 == 636, "Decimals work like they should"
        assert aa.untranslated_string_value == "0.636", "Decimal attributes save string representations"
        return
    if aa.attribute.type == AttributeType.TIMEDELTA:
        # Bare numbers are interpreted as a count of seconds.
        aa.value = 86400
        aa.save()
        assert aa.value.days == 1, "86,400 seconds is one day"
        assert aa.untranslated_string_value == "86400", "Timedeltas are seconds as strings"
        aa.value = datetime.timedelta(days=4)
        aa.save()
        assert aa.value.days == 4, "4 days remain as 4 days"
        assert aa.untranslated_string_value == "345600", "Timedeltas are still seconds as strings"
        return
    if aa.attribute.type == AttributeType.UNTRANSLATED_STRING:
        aa.value = "Dog Hello"
        aa.save()
        assert aa.value == "Dog Hello", "Untranslated strings work"
        assert aa.untranslated_string_value == "Dog Hello", "Untranslated strings work"
        return
    if aa.attribute.type == AttributeType.TRANSLATED_STRING:
        assert aa.attribute.is_translated
        with override_settings(LANGUAGES=[(x, x) for x in ("en", "fi", "ga", "ja")]):
            versions = {
                "en": u"science fiction",
                "fi": u"tieteiskirjallisuus",
                "ga": u"ficsean eolaíochta",
                "ja": u"空想科学小説",
            }
            # Store one translation per language, then read them all back.
            for language_code, text in versions.items():
                aa.set_current_language(language_code)
                aa.value = text
                aa.save()
                assert aa.value == text, "Translated strings work"
            for language_code, text in versions.items():
                assert aa.safe_translation_getter("translated_string_value", language_code=language_code) == text, "%s translation is safe" % language_code
            # "xx" has no stored translation; the lookup falls back to "".
            aa.set_current_language("xx")
            assert aa.value == "", "untranslated version yields an empty string"
        return
    if aa.attribute.type == AttributeType.DATE:
        aa.value = "2014-01-01"
        assert aa.value == datetime.date(2014, 1, 1), "Date parsing works"
        assert aa.untranslated_string_value == "2014-01-01", "Dates are saved as strings"
        return
    if aa.attribute.type == AttributeType.DATETIME:
        # Free-form strings are rejected for datetime attributes.
        with pytest.raises(TypeError):
            aa.value = "yesterday"
        dt = datetime.datetime(1997, 8, 12, 14)
        aa.value = dt
        assert aa.value.toordinal() == 729248, "Date assignment works"
        assert aa.value.time().hour == 14, "The clock still works"
        assert aa.untranslated_string_value == dt.isoformat(), "Datetimes are saved as strings too"
        return
    raise NotImplementedError("Not implemented: populating %s" % aa.attribute.type)  # pragma: no cover
0
Example 15
Project: pyafipws Source File: wslpg.py
def test_ajuste_unificado(self):
    "Test of a unified adjustment for a grain settlement (WSLPGv1.4)"
    wslpg = self.wslpg
    # request a settlement first so there is an authorized COE to adjust:
    self.test_liquidacion()
    coe = wslpg.COE
    # request the last order number for the new adjustment settlement:
    pto_emision = 55
    ok = wslpg.ConsultarUltNroOrden(pto_emision)
    self.assertTrue(ok)
    nro_orden = wslpg.NroOrden + 1
    # create the base adjustment and attach the certificate data:
    wslpg.CrearAjusteBase(pto_emision=pto_emision,
                          nro_orden=nro_orden,
                          coe_ajustado=coe,
                          cod_provincia=1,
                          cod_localidad=5,
                          )
    wslpg.AgregarCertificado(tipo_certificado_deposito=5,
                             nro_certificado_deposito=555501200729,
                             peso_neto=10000,
                             cod_localidad_procedencia=3,
                             cod_prov_procedencia=1,
                             campania=1213,
                             fecha_cierre='2013-01-13',
                             peso_neto_total_certificado=10000)
    # create the credit adjustment (see AFIP documentation)
    wslpg.CrearAjusteCredito(
        diferencia_peso_neto=1000, diferencia_precio_operacion=100,
        cod_grado="G2", val_grado=1.0, factor=100,
        diferencia_precio_flete_tn=10,
        datos_adicionales='AJUSTE CRED UNIF',
        concepto_importe_iva_0='Alicuota Cero',
        importe_ajustar_Iva_0=900,
        concepto_importe_iva_105='Alicuota Diez',
        importe_ajustar_Iva_105=800,
        concepto_importe_iva_21='Alicuota Veintiuno',
        importe_ajustar_Iva_21=700,
    )
    wslpg.AgregarDeduccion(codigo_concepto="AL",
                           detalle_aclaratorio="Deduc Alm",
                           dias_almacenaje="1",
                           precio_pkg_diario=0.01,
                           comision_gastos_adm=1.0,
                           base_calculo=1000.0,
                           alicuota=10.5, )
    wslpg.AgregarRetencion(codigo_concepto="RI",
                           detalle_aclaratorio="Ret IVA",
                           base_calculo=1000,
                           alicuota=10.5, )
    # create the debit adjustment (see AFIP documentation)
    wslpg.CrearAjusteDebito(
        diferencia_peso_neto=500, diferencia_precio_operacion=100,
        cod_grado="G2", val_grado=1.0, factor=100,
        diferencia_precio_flete_tn=0.01,
        datos_adicionales='AJUSTE DEB UNIF',
        concepto_importe_iva_0='Alic 0',
        importe_ajustar_Iva_0=250,
        concepto_importe_iva_105='Alic 10.5',
        importe_ajustar_Iva_105=200,
        concepto_importe_iva_21='Alicuota 21',
        importe_ajustar_Iva_21=50,
    )
    wslpg.AgregarDeduccion(codigo_concepto="AL",
                           detalle_aclaratorio="Deduc Alm",
                           dias_almacenaje="1",
                           precio_pkg_diario=0.01,
                           comision_gastos_adm=1.0,
                           base_calculo=500.0,
                           alicuota=10.5, )
    wslpg.AgregarRetencion(codigo_concepto="RI",
                           detalle_aclaratorio="Ret IVA",
                           base_calculo=100,
                           alicuota=10.5, )
    # authorize the adjustment:
    ok = wslpg.AjustarLiquidacionUnificado()
    self.assertTrue(ok)
    # check the general response:
    self.assertIsInstance(wslpg.COE, basestring)
    self.assertEqual(len(wslpg.COE), len("330100013133"))
    coe_ajustado = coe
    coe = wslpg.COE
    try:
        # expected totals computed by the AFIP test service for this input:
        self.assertEqual(wslpg.Estado, "AC")
        self.assertEqual(wslpg.Subtotal, Decimal("-734.10"))
        self.assertEqual(wslpg.TotalIva105, Decimal("-77.61"))
        self.assertEqual(wslpg.TotalIva21, Decimal("0"))
        self.assertEqual(wslpg.TotalRetencionesGanancias, Decimal("0"))
        self.assertEqual(wslpg.TotalRetencionesIVA, Decimal("-94.50"))
        self.assertEqual(wslpg.TotalNetoAPagar, Decimal("-716.68"))
        self.assertEqual(wslpg.TotalIvaRg2300_07, Decimal("16.89"))
        self.assertEqual(wslpg.TotalPagoSegunCondicion, Decimal("-733.57"))
        # check the credit adjustment
        ok = wslpg.AnalizarAjusteCredito()
        self.assertTrue(ok)
        self.assertEqual(wslpg.GetParametro("precio_operacion"), "1.900")
        self.assertEqual(wslpg.GetParametro("total_peso_neto"), "1000")
        self.assertEqual(wslpg.TotalDeduccion, Decimal("11.05"))
        self.assertEqual(wslpg.TotalPagoSegunCondicion, Decimal("2780.95"))
        self.assertEqual(wslpg.GetParametro("importe_iva"), "293.16")
        self.assertEqual(wslpg.GetParametro("operacion_con_iva"), "3085.16")
        self.assertEqual(wslpg.GetParametro("deducciones", 0, "importe_iva"), "1.05")
        # check the debit adjustment
        ok = wslpg.AnalizarAjusteDebito()
        self.assertTrue(ok)
        self.assertEqual(wslpg.GetParametro("precio_operacion"), "2.090")
        self.assertEqual(wslpg.GetParametro("total_peso_neto"), "500")
        self.assertEqual(wslpg.TotalDeduccion, Decimal("5.52"))
        self.assertEqual(wslpg.TotalPagoSegunCondicion, Decimal("2047.38"))
        self.assertEqual(wslpg.GetParametro("importe_iva"), "215.55")
        self.assertEqual(wslpg.GetParametro("operacion_con_iva"), "2268.45")
        self.assertEqual(wslpg.GetParametro("retenciones", 0, "importe_retencion"), "10.50")
    finally:
        # void the adjustment to avoid subsequent AFIP validation:
        if coe:
            self.test_anular(coe)
        if coe_ajustado:
            self.test_anular(coe_ajustado)  # also void the original settlement
Example 16
Project: simplecoin_multi Source File: utils.py
def collect_pool_stats():
    """
    Collects the necessary data to render the /pool_stats view or the API.

    Returns a dict with per-algorithm/per-currency network and round data,
    per-powerpool server status, and the active block-stats tab.
    """
    network_data = {}
    for currency in currencies.itervalues():
        if not currency.mineable:
            continue
        # Set currency defaults
        currency_data = dict(code=currency.key,
                             name=currency.name,
                             merged=currency.merged,
                             difficulty=None,
                             hashrate=0,
                             height=None,
                             difficulty_avg=0,
                             reward=0,
                             hps=currency.algo.hashes_per_share,
                             blocks=[])
        # Set round data defaults
        round_data = dict(start_time=None,
                          shares=0,
                          avg_shares_to_solve=None,
                          shares_per_sec=None,
                          status="Idle",
                          currency_data=currency_data)
        # Set nested dictionary defaults
        network_data.setdefault(currency.algo.display, {})
        network_data[currency.algo.display].setdefault(currency.key, round_data)
        # Grab some blocks for this currency
        blocks = (Block.query.filter_by(currency=currency.key).
                  options(db.joinedload('chain_payouts')).
                  order_by(Block.found_at.desc()).limit(4).all())
        # Update the dicts if we found any blocks
        if blocks:
            # Update the currency_dict's blocks
            currency_data['blocks'] = blocks
            # Use the most recent block as the start_time
            round_data['start_time'] = blocks[0].timestamp
        # Check the cache for the currency's network data
        currency_data.update(cache.get("{}_data".format(currency.key)) or {})
        # Check the cache for the currency's profit data
        profit = cache.get("{}_profitability".format(currency.key)) or '???'
        # BUGFIX: compare by equality, not identity -- the original
        # `profit is not '???'` relied on CPython string interning and
        # raises a SyntaxWarning on modern Pythons.
        if profit != '???':
            profit = profit.quantize(Decimal('0.00000001'))
        profit = {'profitability': profit}
        currency_data.update(profit)
        # Check the cache for the currency's hashrate data
        hashrate = cache.get("hashrate_{}".format(currency.key)) or 0
        currency_data['hashrate'] = float(hashrate)
        # Calculate the shares/second at this hashrate
        shares_per_sec = currency_data['hashrate'] / currency_data['hps']
        round_data['shares_per_sec'] = shares_per_sec
        # Set the status
        if round_data['shares_per_sec'] > 0:
            round_data['status'] = "In Progress"
        # Set the difficulty average, falling back to current difficulty
        difficulty_avg = currency_data.get('difficulty_avg', 0)
        if difficulty_avg != 0:
            currency_data['difficulty_avg'] = difficulty_avg
        else:
            currency_data['difficulty_avg'] = currency_data['difficulty']
        # Calculate the share solve average
        avg_hashes_to_solve = difficulty_avg * (2 ** 32)
        avg_shares_to_solve = avg_hashes_to_solve / currency_data['hps']
        round_data['avg_shares_to_solve'] = avg_shares_to_solve
        # Check the cache for the currency's current round data
        key = 'current_block_{}_{}'.format(currency, currency.algo)
        cached_round_data = redis_conn.hgetall(key) or {}
        # Parse out some values from the cached round data.
        # BUGFIX: the original guard was `is not {}`, an identity test
        # against a fresh dict literal that is *always* true; test
        # truthiness instead (an empty dict was a no-op anyway).
        if cached_round_data:
            chain_shares = [k for k in cached_round_data.keys()
                            if k.startswith("chain_") and k.endswith("shares")]
            # Prefer the start time in the cache over the block, if available
            if 'start_time' in cached_round_data:
                round_data['start_time'] = int(float(cached_round_data['start_time']))
            # Increment the round shares (renamed from `key` so it no
            # longer shadows the redis key above)
            for share_key in chain_shares:
                round_data[share_key] = float(cached_round_data[share_key])
                round_data['shares'] += round_data[share_key]
        # Update our dicts
        round_data['currency_data'].update(currency_data)
        network_data[currency.algo.display][currency.key].update(round_data)
    past_chain_profit = get_past_chain_profit()
    server_status_default = dict(name='',
                                 offline=True,
                                 hashrate=0,
                                 workers=0,
                                 miners=0,
                                 profit_4d=0,
                                 currently_mining="???")
    cached_server_status = cache.get('server_status') or []
    server_status = {}
    for powerp in powerpools.itervalues():
        server_status.setdefault(powerp.key, server_status_default.copy())
        if powerp.key in cached_server_status:
            server_status[powerp.key] = cached_server_status[powerp.key]
        server_status[powerp.key]['name'] = powerp.stratum_address
        server_status[powerp.key]['profit_4d'] = past_chain_profit[powerp.chain.id]
    block_stats_tab = session.get('block_stats_tab', "all")
    # Session key may have expired but be returned as undefined
    if block_stats_tab == "undefined":
        block_stats_tab = session['block_stats_tab'] = "all"
    return dict(network_data=network_data,
                server_status=server_status,
                powerpools=powerpools,
                block_stats_tab=block_stats_tab)
Example 17
Project: SmartElect Source File: utils.py
def cleanup_lat_or_long(latlng):
    """
    Given character string that is supposed to contain a latitude or longitude,
    return either a valid Decimal value, or None.

    Note: This assumes E/N and does not handle anything west of Greenwich or
    south of the equator! If the input has a - or W or S in it, it'll probably
    just fail to recognize it as a valid coordinate and return None.
    """
    # Strip whitespace and degree signs
    s = latlng.strip().rstrip(u'E\xb0')
    # If nothing left, we have no data.
    if len(s) == 0:
        return None
    d = None
    # See if it's a simple decimal value
    if '.' in s:
        try:
            d = parse_latlong(s)
        except InvalidOperation:
            pass
    if d is None:
        # 290250
        # 204650
        # Assume DDMMSS
        m = re.match(r'^(\d\d)(\d\d)(\d\d)$', s)
        if m:
            val = float(m.group(1)) + float(m.group(2)) / 60.0 + float(m.group(3)) / 3600.0
            d = parse_latlong(val)
    if d is None:
        # 12°37'49.30"
        # 20° 6'9.54"E
        # 20°29'33.84"E
        # Assume the format is: degrees minutes seconds.milliseconds
        # BUGFIX: the raw-string prefix was inside the quotes
        # ('r^(\d\d?)...'), so the pattern began with a literal "r" and
        # could never match; moved the prefix outside the literal.
        m = re.match(r'^(\d\d?)\D+(\d\d?)\D+(\d\d?)\D+(\d\d?)$', s)
        if m:
            parts = m.groups()
            val = (float(parts[0])
                   + float(parts[1]) / 60.0
                   + float('%s.%s' % (parts[2], parts[3])) / 3600.0)
            d = parse_latlong(val)
    if d is None:
        # Pick out the groups of digits
        parts = _extract_numerals(s)
        if len(parts) == 4:
            # 12°37'49.30"
            # 20° 6'9.54"E
            # 10ْ .05 30 63
            # Assume the format is: degrees minutes seconds.fractionalseconds
            val = (float(parts[0])
                   + float(parts[1]) / 60.0
                   + float('%s.%s' % (parts[2], parts[3])) / 3600.0)
            d = parse_latlong(val)
        elif len(parts) == 3:
            # 12ْ 14 23
            # 14ْ 25 816
            # 32ْ 453 700
            # Hmm - assume degrees minutes seconds?
            if float(parts[1]) > 60.0 or float(parts[2]) > 60.0:
                # Just makes no sense - ignore it
                return None
            val = (float(parts[0])
                   + float(parts[1]) / 60.0
                   + float(parts[2]) / 3600.0)
            d = parse_latlong(val)
        elif len(parts) == 2:
            # 12° 2  -> degrees and minutes only
            d = parse_latlong(float(parts[0]) + float(parts[1]) / 60.0)
    if d is None:
        return None
    # Sanity bound: no coordinate exceeds 180 degrees.
    if d > Decimal('180.0'):
        return None
    return d
0
Example 18
def get_story(self, doc):
    """Build the reportlab story for the 'Orders by tax rate' export.

    Produces a headline plus one table: a row per order (with the order's
    payment fee and position sums split into gross/tax columns per tax
    rate) and a final totals row.
    """
    from reportlab.platypus import Paragraph, Spacer, TableStyle, Table
    from reportlab.lib.units import mm
    headlinestyle = self.get_style()
    headlinestyle.fontSize = 15
    headlinestyle.fontName = 'OpenSansBd'
    # All tax rates occurring on payment fees of matching orders...
    tax_rates = set(
        self.event.orders.exclude(payment_fee=0).values_list('payment_fee_tax_rate', flat=True)
        .filter(status__in=self.form_data['status'])
        .distinct().order_by()
    )
    # ...plus the rates on individual order positions.
    tax_rates |= set(
        a for a
        in OrderPosition.objects.filter(order__event=self.event)
        .filter(order__status__in=self.form_data['status'])
        .values_list('tax_rate', flat=True).distinct().order_by()
    )
    tax_rates = sorted(tax_rates)
    # Cols: Order ID | Order date | Status | Payment Date | Total | {gross tax} for t in taxes
    colwidths = [a * doc.width for a in [0.12, 0.1, 0.10, 0.12, 0.08]]
    if tax_rates:
        # The remaining 48% of the width is shared evenly by the
        # gross/tax column pairs.
        colwidths += [0.48 / (len(tax_rates) * 2) * doc.width] * (len(tax_rates) * 2)
    tstyledata = [
        # Alignment
        ('ALIGN', (0, 0), (3, 0), 'LEFT'),  # Headlines
        ('ALIGN', (4, 0), (-1, 0), 'CENTER'),  # Headlines
        ('ALIGN', (4, 1), (-1, -1), 'RIGHT'),  # Money
        ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
        # Fonts
        ('FONTNAME', (0, 0), (-1, 0), 'OpenSansBd'),  # Headlines
        ('FONTNAME', (0, -1), (-1, -1), 'OpenSansBd'),  # Sums
    ]
    # Merge each tax-rate header cell across its gross/tax column pair.
    for i, rate in enumerate(tax_rates):
        tstyledata.append(('SPAN', (5 + 2 * i, 0), (6 + 2 * i, 0)))
    story = [
        Paragraph(_('Orders by tax rate ({currency})').format(currency=self.event.currency), headlinestyle),
        Spacer(1, 5 * mm)
    ]
    # Two header rows: rate labels spanning the pairs, then Gross/Tax captions.
    tdata = [
        [
            _('Order code'), _('Order date'), _('Status'), _('Payment date'), _('Order total'),
        ] + sum(([str(t) + ' %', ''] for t in tax_rates), []),
        [
            '', '', '', '', ''
        ] + sum(([_('Gross'), _('Tax')] for t in tax_rates), []),
    ]
    qs = OrderPosition.objects.filter(
        order__status__in=self.form_data['status'],
        order__event=self.event,
    ).values(
        'order__code', 'order__datetime', 'order__payment_date', 'order__total', 'order__payment_fee',
        'order__payment_fee_tax_rate', 'order__payment_fee_tax_value', 'tax_rate', 'order__status'
    ).annotate(prices=Sum('price'), tax_values=Sum('tax_value')).order_by(
        'order__datetime' if self.form_data['sort'] == 'datetime' else 'order__payment_date',
        'order__datetime',
        'order__code'
    )
    last_order_code = None
    tax_sums = defaultdict(Decimal)
    price_sums = defaultdict(Decimal)
    status_labels = dict(Order.STATUS_CHOICE)
    for op in qs:
        # The queryset yields one row per (order, tax rate) pair; start a
        # new table row only when the order changes.
        if op['order__code'] != last_order_code:
            tdata.append(
                [
                    op['order__code'],
                    date_format(op['order__datetime'], "SHORT_DATE_FORMAT"),
                    status_labels[op['order__status']],
                    date_format(op['order__payment_date'], "SHORT_DATE_FORMAT") if op['order__payment_date'] else '',
                    str(op['order__total'])
                ] + sum((['', ''] for t in tax_rates), []),
            )
            last_order_code = op['order__code']
            # The payment fee is booked once per order, on its first row.
            if op['order__payment_fee_tax_value']:
                tdata[-1][5 + 2 * tax_rates.index(op['order__payment_fee_tax_rate'])] = str(op['order__payment_fee'])
                tdata[-1][6 + 2 * tax_rates.index(op['order__payment_fee_tax_rate'])] = str(op['order__payment_fee_tax_value'])
                tax_sums[op['order__payment_fee_tax_rate']] += op['order__payment_fee_tax_value']
                price_sums[op['order__payment_fee_tax_rate']] += op['order__payment_fee']
        # Accumulate the position sums into the order row and the totals.
        i = tax_rates.index(op['tax_rate'])
        tdata[-1][5 + 2 * i] = str(Decimal(tdata[-1][5 + 2 * i] or '0') + op['prices'])
        tdata[-1][6 + 2 * i] = str(Decimal(tdata[-1][6 + 2 * i] or '0') + op['tax_values'])
        tax_sums[op['tax_rate']] += op['tax_values']
        price_sums[op['tax_rate']] += op['prices']
    # Final totals row.
    tdata.append(
        [
            _('Total'), '', '', '', ''
        ] + sum(([str(price_sums.get(t)), str(tax_sums.get(t))] for t in tax_rates), []),
    )
    table = Table(tdata, colWidths=colwidths, repeatRows=2)
    table.setStyle(TableStyle(tstyledata))
    story.append(table)
    return story
0
Example 19
Project: pyfpdf Source File: test_invoice.py
@common.add_unittest
def dotest(outputname, nostamp):
    # generate sample invoice (according to Argentina's regulations)
    from decimal import Decimal
    f = Template(format="A4",
                 title="Sample Invoice", author="Sample Company",
                 subject="Sample Customer", keywords="Electronic TAX Invoice")
    if nostamp:
        # deterministic output for regression testing: fixed PDF metadata
        # and a fake random source
        f.pdf._putinfo = lambda: common.test_putinfo(f.pdf)
        random = randomfake()
    else:
        import random
    csvpath = os.path.join(common.basepath, "invoice.csv")
    f.parse_csv(infile=csvpath, delimiter=";", decimal_sep=",")
    detail = "Lorem ipsum dolor sit amet, consectetur. " * 30
    items = []
    # build 29 sample line items with random quantity, price and code
    for i in range(1, 30):
        ds = "Sample product %s" % i
        qty = random.randint(1,10)
        price = round(random.random()*100,3)
        code = "%s%s%02d" % (chr(random.randint(65,90)), chr(random.randint(65,90)),i)
        items.append(dict(code=code, unit='u',
                          qty=qty, price=price,
                          amount=qty*price,
                          ds="%s: %s" % (i,ds)))
    # divide and count lines
    lines = 0
    li_items = []
    for it in items:
        qty = it['qty']
        code = it['code']
        unit = it['unit']
        for ds in f.split_multicell(it['ds'], 'item_description01'):
            # add item description line (without price nor amount)
            li_items.append(dict(code=code, ds=ds, qty=qty, unit=unit, price=None, amount=None))
            # clean qty and code (show only at first)
            unit = qty = code = None
        # set last item line price and amount
        li_items[-1].update(amount = it['amount'],
                            price = it['price'])
    # append the free-text observations block as extra description lines
    obs="\n<U>Detail:</U>\n\n" + detail
    for ds in f.split_multicell(obs, 'item_description01'):
        li_items.append(dict(code=code, ds=ds, qty=qty, unit=unit, price=None, amount=None))
    # calculate pages (one line per row, last row reserved for "continues"):
    lines = len(li_items)
    max_lines_per_page = 24
    pages = lines // (max_lines_per_page - 1)
    if lines % (max_lines_per_page - 1): pages = pages + 1
    # fill in the fields, page by page
    for page in range(1, int(pages)+1):
        f.add_page()
        f['page'] = 'Page %s of %s' % (page, pages)
        if pages>1 and page<pages:
            s = 'Continues on page %s' % (page+1)
        else:
            s = ''
        f['item_description%02d' % (max_lines_per_page+1)] = s
        f["company_name"] = "Sample Company"
        f["company_logo"] = os.path.join(common.basepath, "../tutorial/logo.png")
        f["company_header1"] = "Some Address - somewhere -"
        f["company_header2"] = "http://www.example.com"
        f["company_footer1"] = "Tax Code ..."
        f["company_footer2"] = "Tax/VAT ID ..."
        f['number'] = '0001-00001234'
        f['issue_date'] = '2010-09-10'
        f['due_date'] = '2099-09-10'
        f['customer_name'] = "Sample Client"
        f['customer_address'] = "Siempreviva 1234"
        # print line item...
        li = 0
        k = 0
        total = Decimal("0.00")
        for it in li_items:
            k = k + 1
            if k > page * (max_lines_per_page - 1):
                break
            # accumulate the running total across all pages seen so far
            if it['amount']:
                total += Decimal("%.6f" % it['amount'])
            # only rows belonging to the current page are written out
            if k > (page - 1) * (max_lines_per_page - 1):
                li += 1
                if it['qty'] is not None:
                    f['item_quantity%02d' % li] = it['qty']
                if it['code'] is not None:
                    f['item_code%02d' % li] = it['code']
                if it['unit'] is not None:
                    f['item_unit%02d' % li] = it['unit']
                f['item_description%02d' % li] = it['ds']
                if it['price'] is not None:
                    f['item_price%02d' % li] = "%0.3f" % it['price']
                if it['amount'] is not None:
                    f['item_amount%02d' % li] = "%0.2f" % it['amount']
        if pages == page:
            # last page: show the net/VAT breakdown (21% VAT included in total)
            f['net'] = "%0.2f" % (total/Decimal("1.21"))
            f['vat'] = "%0.2f" % (total*(1-1/Decimal("1.21")))
            f['total_label'] = 'Total:'
        else:
            f['total_label'] = 'SubTotal:'
        f['total'] = "%0.2f" % total
    f.render(outputname)
0
Example 20
def _calculate(self):
    """Checks the dimensions of the sheet are valid and consistent.

    NB: this is called internally when needed; there should be no need for
    user code to call it.

    Raises InvalidDimension when any sheet/label dimension, margin, gap,
    padding, or radius is missing, negative, or inconsistent. As a side
    effect, unspecified margins/gaps are computed from the left-over space
    and stored on self (tracked in self._autoset so they can be recomputed).
    """
    # Check the dimensions are larger than zero.
    for dimension in ('_sheet_width', '_sheet_height', '_columns', '_rows', '_label_width', '_label_height'):
        if getattr(self, dimension) <= 0:
            name = dimension.replace('_', ' ').strip().capitalize()
            raise InvalidDimension("{0:s} must be greater than zero.".format(name))

    # Check margins / gaps are not smaller than zero if given.
    # At the same time, force the values to decimals.
    for margin in ('_left_margin', '_column_gap', '_right_margin', '_top_margin', '_row_gap', '_bottom_margin',
                   '_left_padding', '_right_padding', '_top_padding', '_bottom_padding'):
        val = getattr(self, margin)
        if val is not None:
            if margin in self._autoset:
                # This value was auto-computed on a previous pass: reset it
                # to None so it is recalculated from scratch below.
                val = None
            else:
                val = Decimal(val)
                if val < 0:
                    name = margin.replace('_', ' ').strip().capitalize()
                    raise InvalidDimension("{0:s} cannot be less than zero.".format(name))
            setattr(self, margin, val)
        else:
            # Remember that the user never specified this one, so future
            # recalculations are allowed to overwrite it automatically.
            self._autoset.add(margin)

    # Check the corner radius.
    if self._corner_radius < 0:
        raise InvalidDimension("Corner radius cannot be less than zero.")
    if self._corner_radius > (self._label_width / 2):
        raise InvalidDimension("Corner radius cannot be more than half the label width.")
    if self._corner_radius > (self._label_height / 2):
        raise InvalidDimension("Corner radius cannot be more than half the label height.")

    # If there is no padding, we don't need the padding radius.
    if (self._left_padding + self._right_padding + self._top_padding + self._bottom_padding) == 0:
        if self._padding_radius != 0:
            raise InvalidDimension("Padding radius must be zero if there is no padding.")
    else:
        if (self._left_padding + self._right_padding) >= self._label_width:
            raise InvalidDimension("Sum of horizontal padding must be less than the label width.")
        if (self._top_padding + self._bottom_padding) >= self._label_height:
            raise InvalidDimension("Sum of vertical padding must be less than the label height.")
        if self._padding_radius < 0:
            raise InvalidDimension("Padding radius cannot be less than zero.")

    # Calculate the amount of spare space.
    hspace = self._sheet_width - (self._label_width * self._columns)
    vspace = self._sheet_height - (self._label_height * self._rows)

    # Cannot fit.
    if hspace < 0:
        raise InvalidDimension("Labels are too wide to fit on the sheet.")
    if vspace < 0:
        raise InvalidDimension("Labels are too tall to fit on the sheet.")

    # Process the horizontal margins / gaps.
    # hcount counts the horizontal margins/gaps that are still unspecified;
    # whatever hspace remains is shared equally between them at the end.
    hcount = 1 + self._columns
    if self._left_margin is not None:
        hspace -= self._left_margin
        if hspace < 0:
            raise InvalidDimension("Left margin is too wide for the labels to fit on the sheet.")
        hcount -= 1
    if self._column_gap is not None:
        hspace -= ((self._columns - 1) * self._column_gap)
        if hspace < 0:
            raise InvalidDimension("Column gap is too wide for the labels to fit on the sheet.")
        hcount -= (self._columns - 1)
    if self._right_margin is not None:
        hspace -= self._right_margin
        # Absorb tiny (sub-0.01mm) rounding error into the right margin
        # rather than rejecting an otherwise-exact layout.
        if hspace < 0.01 and hspace > -0.01:
            self._right_margin += hspace
            hspace = 0
        if hspace < 0:
            raise InvalidDimension("Right margin is too wide for the labels to fit on the sheet.")
        hcount -= 1

    # Process the vertical margins / gaps.
    # vcount mirrors hcount for the vertical direction.
    vcount = 1 + self._rows
    if self._top_margin is not None:
        vspace -= self._top_margin
        if vspace < 0:
            raise InvalidDimension("Top margin is too tall for the labels to fit on the sheet.")
        vcount -= 1
    if self._row_gap is not None:
        vspace -= ((self._rows - 1) * self._row_gap)
        if vspace < 0:
            raise InvalidDimension("Row gap is too tall for the labels to fit on the sheet.")
        vcount -= (self._rows - 1)
    if self._bottom_margin is not None:
        vspace -= self._bottom_margin
        # Same sub-0.01mm rounding tolerance as the right margin above.
        if vspace < 0.01 and vspace > -0.01:
            self._bottom_margin += vspace
            vspace = 0
        if vspace < 0:
            raise InvalidDimension("Bottom margin is too tall for the labels to fit on the sheet.")
        vcount -= 1

    # If all the margins are specified, they must use up all available space.
    if hcount == 0 and hspace != 0:
        raise InvalidDimension("Not all width used by manually specified margins/gaps; {}mm left.".format(hspace))
    if vcount == 0 and vspace != 0:
        raise InvalidDimension("Not all height used by manually specified margins/gaps; {}mm left.".format(vspace))

    # Split any extra horizontal space and allocate it.
    if hcount:
        auto_margin = hspace / hcount
        for margin in ('_left_margin', '_column_gap', '_right_margin'):
            if getattr(self, margin) is None:
                setattr(self, margin, auto_margin)

    # And allocate any extra vertical space.
    if vcount:
        auto_margin = vspace / vcount
        for margin in ('_top_margin', '_row_gap', '_bottom_margin'):
            if getattr(self, margin) is None:
                setattr(self, margin, auto_margin)
0
Example 21
Project: moto Source File: test_dynamodb_table_with_range_key.py
@mock_dynamodb2
def test_boto3_query_gsi_range_comparison():
    """Exercise GSI range-key queries: sort order, reversed scans,
    swapped hash/range conditions, empty results, and equality."""
    table = _create_table_with_range_key()

    # Seed five rows; each tuple is (subject, username, created).
    seed_rows = [
        ('123', 'johndoe', 3),
        ('456', 'johndoe', 1),
        ('789', 'johndoe', 2),
        ('159', 'janedoe', 2),
        ('601', 'janedoe', 5),
    ]
    for subject, username, created in seed_rows:
        table.put_item(Item={
            'forum_name': 'the-key',
            'subject': subject,
            'username': username,
            'created': created,
        })

    # Test a query returning all johndoe items
    results = table.query(
        KeyConditionExpression=Key('username').eq('johndoe') & Key('created').gt(0),
        ScanIndexForward=True,
        IndexName='TestGSI',
    )
    expected_subjects = ["456", "789", "123"]
    for position, item in enumerate(results['Items']):
        item["subject"].should.equal(expected_subjects[position])

    # Return all johndoe items again, but in reverse
    results = table.query(
        KeyConditionExpression=Key('username').eq('johndoe') & Key('created').gt(0),
        ScanIndexForward=False,
        IndexName='TestGSI',
    )
    for position, item in enumerate(reversed(results['Items'])):
        item["subject"].should.equal(expected_subjects[position])

    # Filter the creation to only return some of the results
    # And reverse order of hash + range key
    results = table.query(
        KeyConditionExpression=Key('created').gt(1) & Key('username').eq('johndoe'),
        ConsistentRead=True,
        IndexName='TestGSI',
    )
    results['Count'].should.equal(2)

    # Filter to return no results
    results = table.query(
        KeyConditionExpression=Key('username').eq('janedoe') & Key('created').gt(9),
        IndexName='TestGSI',
    )
    results['Count'].should.equal(0)

    results = table.query(
        KeyConditionExpression=Key('username').eq('janedoe') & Key('created').eq(5),
        IndexName='TestGSI',
    )
    results['Count'].should.equal(1)

    # Test range key sorting
    results = table.query(
        KeyConditionExpression=Key('username').eq('johndoe') & Key('created').gt(0),
        IndexName='TestGSI',
    )
    expected_created = [Decimal('1'), Decimal('2'), Decimal('3')]
    for position, item in enumerate(results['Items']):
        item["created"].should.equal(expected_created[position])
0
Example 22
Project: edison Source File: emitters.py
def construct(self):
    """
    Recursively serialize a lot of types, and
    in cases where it doesn't recognize the type,
    it will fall back to Django's `smart_unicode`.

    Returns `dict`.

    NOTE(review): this method uses Python 2-only constructs
    (`basestring`, `dict.iteritems`, `ret.keys()` list concatenation,
    `except NoReverseMatch, e`); it will not run on Python 3.
    """
    def _any(thing, fields=None):
        """
        Dispatch, all types are routed through here.

        Returns None for unhandled zero-result branches (e.g. a function
        that takes arguments).
        """
        ret = None
        if isinstance(thing, QuerySet):
            ret = _qs(thing, fields)
        elif isinstance(thing, (tuple, list, set)):
            ret = _list(thing, fields)
        elif isinstance(thing, dict):
            ret = _dict(thing, fields)
        elif isinstance(thing, decimal.Decimal):
            # Decimals are emitted as strings so no precision is lost
            # in the serialized form.
            ret = str(thing)
        elif isinstance(thing, Model):
            ret = _model(thing, fields)
        elif isinstance(thing, HttpResponse):
            raise HttpStatusCode(thing)
        elif inspect.isfunction(thing):
            # Only zero-argument functions are called and serialized.
            if not inspect.getargspec(thing)[0]:
                ret = _any(thing())
        elif hasattr(thing, '__emittable__'):
            # Objects can opt into serialization via an __emittable__
            # method taking only self.
            f = thing.__emittable__
            if inspect.ismethod(f) and len(inspect.getargspec(f)[0]) == 1:
                ret = _any(f())
        elif repr(thing).startswith("<django.db.models.fields.related.RelatedManager"):
            ret = _any(thing.all())
        else:
            ret = smart_unicode(thing, strings_only=True)
        return ret

    def _fk(data, field):
        """
        Foreign keys.
        """
        return _any(getattr(data, field.name))

    def _related(data, fields=None):
        """
        Foreign keys.
        """
        return [ _model(m, fields) for m in data.iterator() ]

    def _m2m(data, field, fields=None):
        """
        Many to many (re-route to `_model`.)
        """
        return [ _model(m, fields) for m in getattr(data, field.name).iterator() ]

    def _model(data, fields=None):
        """
        Models. Will respect the `fields` and/or
        `exclude` on the handler (see `typemapper`.)
        """
        ret = { }
        handler = self.in_typemapper(type(data), self.anonymous)
        get_absolute_uri = False
        if handler or fields:
            v = lambda f: getattr(data, f.attname)
            if handler:
                fields = getattr(handler, 'fields')
            if not fields or hasattr(handler, 'fields'):
                """
                Fields was not specified, try to find teh correct
                version in the typemapper we were sent.
                """
                mapped = self.in_typemapper(type(data), self.anonymous)
                get_fields = set(mapped.fields)
                exclude_fields = set(mapped.exclude).difference(get_fields)
                if 'absolute_uri' in get_fields:
                    get_absolute_uri = True
                if not get_fields:
                    # No explicit fields: default to every concrete/virtual
                    # field, stripping a single trailing "_id" from FK columns.
                    get_fields = set([ f.attname.replace("_id", "", 1)
                        for f in data._meta.fields + data._meta.virtual_fields])
                if hasattr(mapped, 'extra_fields'):
                    get_fields.update(mapped.extra_fields)
                # sets can be negated.
                for exclude in exclude_fields:
                    if isinstance(exclude, basestring):
                        get_fields.discard(exclude)
                    elif isinstance(exclude, re._pattern_type):
                        # Regex excludes drop every matching field name.
                        for field in get_fields.copy():
                            if exclude.match(field):
                                get_fields.discard(field)
            else:
                get_fields = set(fields)
            met_fields = self.method_fields(handler, get_fields)
            # Serialize local model fields, skipping any that a handler
            # "resource method" overrides (handled further below).
            for f in data._meta.local_fields + data._meta.virtual_fields:
                if f.serialize and not any([ p in met_fields for p in [ f.attname, f.name ]]):
                    if not f.rel:
                        if f.attname in get_fields:
                            ret[f.attname] = _any(v(f))
                            get_fields.remove(f.attname)
                    else:
                        # FK columns: attname ends in "_id"; match on the
                        # bare name and emit the related object.
                        if f.attname[:-3] in get_fields:
                            ret[f.name] = _fk(data, f)
                            get_fields.remove(f.name)
            for mf in data._meta.many_to_many:
                if mf.serialize and mf.attname not in met_fields:
                    if mf.attname in get_fields:
                        ret[mf.name] = _m2m(data, mf)
                        get_fields.remove(mf.name)
            # try to get the remainder of fields
            for maybe_field in get_fields:
                if isinstance(maybe_field, (list, tuple)):
                    # A (name, sub-fields) pair: serialize a related object
                    # or manager with its own nested field selection.
                    model, fields = maybe_field
                    inst = getattr(data, model, None)
                    if inst:
                        if hasattr(inst, 'all'):
                            ret[model] = _related(inst, fields)
                        elif callable(inst):
                            if len(inspect.getargspec(inst)[0]) == 1:
                                ret[model] = _any(inst(), fields)
                        else:
                            ret[model] = _model(inst, fields)
                elif maybe_field in met_fields:
                    # Overriding normal field which has a "resource method"
                    # so you can alter the contents of certain fields without
                    # using different names.
                    ret[maybe_field] = _any(met_fields[maybe_field](data))
                else:
                    maybe = getattr(data, maybe_field, None)
                    if maybe is not None:
                        if callable(maybe):
                            if len(inspect.getargspec(maybe)[0]) <= 1:
                                ret[maybe_field] = _any(maybe())
                        else:
                            ret[maybe_field] = _any(maybe)
                    else:
                        # Not on the instance: fall back to an attribute on
                        # the handler, called with the instance.
                        handler_f = getattr(handler or self.handler, maybe_field, None)
                        if handler_f:
                            ret[maybe_field] = _any(handler_f(data))
        else:
            # No handler and no field spec: emit every model field plus any
            # instance-only attributes not present on the class.
            for f in data._meta.fields:
                ret[f.attname] = _any(getattr(data, f.attname))
            fields = dir(data.__class__) + ret.keys()
            add_ons = [k for k in dir(data) if k not in fields]
            for k in add_ons:
                ret[k] = _any(getattr(data, k))
        # resouce uri
        if self.in_typemapper(type(data), self.anonymous):
            handler = self.in_typemapper(type(data), self.anonymous)
            if hasattr(handler, 'resource_uri'):
                url_id, fields = handler.resource_uri(data)
                try:
                    ret['resource_uri'] = reverser( lambda: (url_id, fields) )()
                except NoReverseMatch, e:
                    pass
        if hasattr(data, 'get_api_url') and 'resource_uri' not in ret:
            try: ret['resource_uri'] = data.get_api_url()
            except: pass
        # absolute uri
        if hasattr(data, 'get_absolute_url') and get_absolute_uri:
            try: ret['absolute_uri'] = data.get_absolute_url()
            except: pass
        return ret

    def _qs(data, fields=None):
        """
        Querysets.
        """
        return [ _any(v, fields) for v in data ]

    def _list(data, fields=None):
        """
        Lists.
        """
        return [ _any(v, fields) for v in data ]

    def _dict(data, fields=None):
        """
        Dictionaries.
        """
        return dict([ (k, _any(v, fields)) for k, v in data.iteritems() ])

    # Kickstart the seralizin'.
    return _any(self.data, self.fields)
0
Example 23
def import_stations(file_name):
"""
Expects a file containing lines of the form e.g.:
226 JALESORE 1122 172 26.65 85.78
275 PHIDIM (PANCHTH 1419 1205 27.15 87.75
unused Station name <-id <-elev <-lat <-lon
0123456789012345678901234567890123456789012345678901234567890123456789
0 1 2 3 4 5 6
"""
for line in open(file_name, "r").readlines():
try:
place_id_text = line[27:33]
except IndexError:
continue
else:
try:
station_id = int(place_id_text)
except ValueError:
continue
else:
station_name = line[8:25].strip() # don't restrict if they add more
elevation_metres = int(line[37:43])
latitude = Decimal(line[47:53])
longitude = Decimal(line[57:623])
place_table_name = climate_place._tablename
existing_place = db(
climate_station_id.station_id == station_id
).select().first()
if existing_place is None:
place_id = climate_place.insert(
longitude = longitude,
latitude = latitude
)
else:
print "Update:"
place_id = existing_place.id
db(climate_place.id == place_id).update(
longitude = longitude,
latitude = latitude
)
def insert_or_update(
table,
place_id,
attribute,
format,
value
):
table_name = table._tablename
if db(table.id == place_id).count() == 0:
value = repr(value)
formatted_value = format(value)
db.executesql(
"INSERT INTO %(table_name)s "
"(id, %(attribute)s) "
"VALUES (%(place_id)i, %(formatted_value)s);" % locals()
)
else:
db(table.id == place_id).update(
**{attribute: value}
)
insert_or_update(
climate_station_name,
place_id,
"name",
str,
station_name
)
insert_or_update(
climate_elevation,
place_id,
"elevation_metres",
float,
elevation_metres
)
insert_or_update(
climate_station_id,
place_id,
"station_id",
int,
station_id
)
print place_id, station_id, station_name, latitude, longitude, elevation_metres
db.commit()
0
Example 24
Project: pika Source File: data.py
def decode_value(encoded, offset):
    """Decode the value passed in returning the decoded value and the number
    of bytes read in addition to the starting offset.

    :param str encoded: The binary encoded data to decode
    :param int offset: The starting byte offset
    :rtype: tuple of (value, new_offset)
    :raises: pika.exceptions.InvalidFieldTypeException

    NOTE(review): the struct format codes for the b'b'/b'B' and b'U'/b'u'
    pairs look swapped relative to their signed/unsigned comments; confirm
    against the AMQP 0-9-1 field-table spec (and peer compatibility) before
    changing them, so they are left as-is here.
    """
    # slice to get bytes in Python 3 and str in Python 2
    kind = encoded[offset:offset + 1]
    offset += 1
    # Bool
    if kind == b't':
        value = struct.unpack_from('>B', encoded, offset)[0]
        value = bool(value)
        offset += 1
    # Short-Short Int
    elif kind == b'b':
        value = struct.unpack_from('>B', encoded, offset)[0]
        offset += 1
    # Short-Short Unsigned Int
    elif kind == b'B':
        value = struct.unpack_from('>b', encoded, offset)[0]
        offset += 1
    # Short Int
    elif kind == b'U':
        value = struct.unpack_from('>h', encoded, offset)[0]
        offset += 2
    # Short Unsigned Int
    elif kind == b'u':
        value = struct.unpack_from('>H', encoded, offset)[0]
        offset += 2
    # Long Int
    elif kind == b'I':
        value = struct.unpack_from('>i', encoded, offset)[0]
        offset += 4
    # Long Unsigned Int
    elif kind == b'i':
        value = struct.unpack_from('>I', encoded, offset)[0]
        offset += 4
    # Long-Long Int
    elif kind == b'L':
        value = long(struct.unpack_from('>q', encoded, offset)[0])
        offset += 8
    # Long-Long Unsigned Int
    elif kind == b'l':
        value = long(struct.unpack_from('>Q', encoded, offset)[0])
        offset += 8
    # Float
    elif kind == b'f':
        # BUGFIX: was long(struct.unpack_from(...)), which truncated the
        # fractional part of every decoded 32-bit float; return it as-is.
        value = struct.unpack_from('>f', encoded, offset)[0]
        offset += 4
    # Double
    elif kind == b'd':
        # BUGFIX: same truncation bug as the float branch above.
        value = struct.unpack_from('>d', encoded, offset)[0]
        offset += 8
    # Decimal
    elif kind == b'D':
        # One byte of scale, then a signed 32-bit raw value:
        # raw=12345, decimals=2 -> Decimal('123.45').
        decimals = struct.unpack_from('B', encoded, offset)[0]
        offset += 1
        raw = struct.unpack_from('>i', encoded, offset)[0]
        offset += 4
        value = decimal.Decimal(raw) * (decimal.Decimal(10) ** -decimals)
    # Short String
    elif kind == b's':
        value, offset = decode_short_string(encoded, offset)
    # Long String
    elif kind == b'S':
        length = struct.unpack_from('>I', encoded, offset)[0]
        offset += 4
        value = encoded[offset:offset + length].decode('utf8')
        offset += length
    # Field Array
    elif kind == b'A':
        length = struct.unpack_from('>I', encoded, offset)[0]
        offset += 4
        offset_end = offset + length
        value = []
        # Recursively decode elements until the declared byte length is used.
        while offset < offset_end:
            v, offset = decode_value(encoded, offset)
            value.append(v)
    # Timestamp
    elif kind == b'T':
        value = datetime.utcfromtimestamp(struct.unpack_from('>Q', encoded,
                                                             offset)[0])
        offset += 8
    # Field Table
    elif kind == b'F':
        (value, offset) = decode_table(encoded, offset)
    # Null / Void
    elif kind == b'V':
        value = None
    else:
        raise exceptions.InvalidFieldTypeException(kind)
    return value, offset
0
Example 25
Project: vumi-go Source File: test_api.py
@inlineCallbacks
def test_transaction(self):
    """
    End-to-end check of billing transaction creation:

    1. inbound message priced from the tag-pool cost, with and without
       session creation, verifying the Transaction row and the running
       credit balance on the account,
    2. a per-account cost override taking precedence over the pool cost,
    3. fallback to the default (pool-less) cost for another account,
    4. error paths: no cost defined for the message direction, and an
       unknown account number -- both surface as ApiCallError (HTTP 500)
       and log a BillingError.
    """
    account = self.account
    account2 = self.account2

    # Set the message cost
    mk_message_cost(
        tag_pool=self.pool1,
        message_direction=MessageCost.DIRECTION_INBOUND,
        message_cost=0.6,
        storage_cost=0.5,
        session_cost=0.3,
        markup_percent=10.0)

    # Expected credit amounts for the two session_created variants.
    credit_amount = MessageCost.calculate_credit_cost(
        Decimal('0.6'),
        Decimal('0.5'),
        Decimal('10.0'),
        Decimal('0.3'),
        session_created=False)

    credit_amount_for_session = MessageCost.calculate_credit_cost(
        Decimal('0.6'),
        Decimal('0.5'),
        Decimal('10.0'),
        Decimal('0.3'),
        session_created=True)

    # Create a transaction
    transaction = yield self.create_api_transaction(
        account_number=account.account_number,
        message_id='msg-id-1',
        tag_pool_name='pool1',
        tag_name='tag1',
        message_direction=MessageCost.DIRECTION_INBOUND,
        session_created=False,
        transaction_type=Transaction.TRANSACTION_TYPE_MESSAGE)

    self.assert_result(
        result=transaction,
        model=Transaction.objects.latest('created'),
        message_id='msg-id-1',
        tag_name='tag1',
        tag_pool_name='pool1',
        account_number=account.account_number,
        credit_amount=-credit_amount,
        credit_factor=Decimal('10.0'),
        markup_percent=Decimal('10.0'),
        message_cost=Decimal('0.6'),
        storage_cost=Decimal('0.5'),
        session_cost=Decimal('0.3'),
        session_created=False,
        message_credits=get_message_credits(0.6, 10.0),
        storage_credits=get_storage_credits(0.5, 10.0),
        session_credits=get_session_credits(0.3, 10.0),
        status=Transaction.STATUS_COMPLETED,
        message_direction=MessageCost.DIRECTION_INBOUND,
        transaction_type=Transaction.TRANSACTION_TYPE_MESSAGE)

    # Get the account and make sure the credit balance was updated
    account = Account.objects.get(id=account.id)
    self.assertEqual(account.credit_balance, -credit_amount)

    # Create a transaction (with session_created=True)
    transaction = yield self.create_api_transaction(
        account_number=account.account_number,
        message_id='msg-id-2',
        tag_pool_name='pool1',
        tag_name='tag1',
        message_direction=MessageCost.DIRECTION_INBOUND,
        session_created=True)

    self.assert_result(
        result=transaction,
        model=Transaction.objects.latest('created'),
        message_id='msg-id-2',
        tag_name='tag1',
        tag_pool_name='pool1',
        account_number=account.account_number,
        credit_amount=-credit_amount_for_session,
        credit_factor=Decimal('10.0'),
        markup_percent=Decimal('10.0'),
        message_cost=Decimal('0.6'),
        storage_cost=Decimal('0.5'),
        session_cost=Decimal('0.3'),
        session_created=True,
        message_credits=get_message_credits(0.6, 10.0),
        storage_credits=get_storage_credits(0.5, 10.0),
        session_credits=get_session_credits(0.3, 10.0),
        status=Transaction.STATUS_COMPLETED,
        message_direction=MessageCost.DIRECTION_INBOUND)

    # Get the account and make sure the credit balance was updated
    account = Account.objects.get(id=account.id)
    self.assertEqual(account.credit_balance,
                     -(credit_amount + credit_amount_for_session))

    # Test override of cost by cost for specific account
    mk_message_cost(
        account=account,
        tag_pool=self.pool1,
        message_direction=MessageCost.DIRECTION_INBOUND,
        message_cost=9.0,
        storage_cost=8.0,
        session_cost=7.0,
        markup_percent=11.0)

    transaction = yield self.create_api_transaction(
        account_number=account.account_number,
        message_id='msg-id-3',
        tag_pool_name='pool1',
        tag_name='tag1',
        message_direction=MessageCost.DIRECTION_INBOUND,
        session_created=False)

    credit_amount = MessageCost.calculate_credit_cost(
        Decimal('9.0'),
        Decimal('8.0'),
        Decimal('11.0'),
        Decimal('7.0'),
        session_created=False)

    self.assert_result(
        result=transaction,
        model=Transaction.objects.latest('created'),
        message_id='msg-id-3',
        tag_name='tag1',
        tag_pool_name='pool1',
        account_number=account.account_number,
        credit_amount=-credit_amount,
        credit_factor=Decimal('10.0'),
        markup_percent=Decimal('11.0'),
        message_cost=Decimal('9.0'),
        storage_cost=Decimal('8.0'),
        session_cost=Decimal('7.0'),
        session_created=False,
        message_credits=get_message_credits(9.0, 11.0),
        storage_credits=get_storage_credits(8.0, 11.0),
        session_credits=get_session_credits(7.0, 11.0),
        status=Transaction.STATUS_COMPLETED,
        message_direction=MessageCost.DIRECTION_INBOUND)

    # Test fallback to default cost
    mk_message_cost(
        message_direction=MessageCost.DIRECTION_OUTBOUND,
        message_cost=0.1,
        storage_cost=0.3,
        session_cost=0.2,
        markup_percent=12.0)

    transaction = yield self.create_api_transaction(
        account_number=account2.account_number,
        message_id='msg-id-4',
        tag_pool_name='pool2',
        tag_name='tag2',
        message_direction=MessageCost.DIRECTION_OUTBOUND,
        session_created=False)

    credit_amount = MessageCost.calculate_credit_cost(
        Decimal('0.1'),
        Decimal('0.3'),
        Decimal('12.0'),
        Decimal('0.2'),
        session_created=False)

    self.assert_result(
        result=transaction,
        model=Transaction.objects.latest('created'),
        message_id='msg-id-4',
        tag_name='tag2',
        tag_pool_name='pool2',
        account_number=account2.account_number,
        credit_amount=-credit_amount,
        credit_factor=Decimal('10.0'),
        markup_percent=Decimal('12.0'),
        message_cost=Decimal('0.1'),
        storage_cost=Decimal('0.3'),
        session_cost=Decimal('0.2'),
        session_created=False,
        message_credits=get_message_credits(0.1, 12.0),
        storage_credits=get_storage_credits(0.3, 12.0),
        session_credits=get_session_credits(0.2, 12.0),
        status=Transaction.STATUS_COMPLETED,
        message_direction=MessageCost.DIRECTION_OUTBOUND)

    # Test that message direction is correctly checked for
    # in the fallback case.
    try:
        yield self.create_api_transaction(
            account_number=account2.account_number,
            message_id='msg-id-4',
            tag_pool_name='pool2',
            tag_name='tag2',
            message_direction=MessageCost.DIRECTION_INBOUND,
            session_created=False)
    except ApiCallError as e:
        self.assertEqual(e.response.responseCode, 500)
        self.assertEqual(
            e.message,
            "Unable to determine Inbound message cost for account"
            " %s and tag pool pool2" % (account2.account_number,))
    else:
        self.fail("Expected transaction creation to fail.")

    [failure] = self.flushLoggedErrors('go.billing.utils.BillingError')
    self.assertEqual(
        failure.value.args,
        ("Unable to determine Inbound message cost for account"
         " %s and tag pool pool2" % (account2.account_number,),))

    # Test that transactions for unknown accounts raised a BillingError
    try:
        yield self.create_api_transaction(
            account_number='unknown-account',
            message_id='msg-id-5',
            tag_pool_name='pool2',
            tag_name='tag2',
            message_direction=MessageCost.DIRECTION_OUTBOUND,
            session_created=False)
    except ApiCallError as e:
        self.assertEqual(e.response.responseCode, 500)
        self.assertEqual(
            e.message,
            "Unable to find billing account unknown-account while"
            " checking credit balance. Message was Outbound to/from"
            " tag pool pool2.")
    else:
        self.fail("Expected transaction creation to fail.")

    [failure] = self.flushLoggedErrors('go.billing.utils.BillingError')
    self.assertEqual(
        failure.value.args,
        ("Unable to find billing account unknown-account while"
         " checking credit balance. Message was Outbound to/from"
         " tag pool pool2.",))
0
Example 26
Project: DistrictBuilder Source File: setup.py
def import_shape(self, store, config):
    """
    Import a shapefile, based on a config.

    Parameters:
        config -- A dictionary with 'shapepath', 'geolevel', 'name_field',
                  'region_filters' and 'subject_fields' keys.

    NOTE(review): uses Python 2-only constructs (`types.StringTypes`,
    `dict.iteritems`, `str.decode`); will not run on Python 3.
    """
    def get_shape_tree(shapefile, feature):
        # Build the zero-padded tree code by concatenating each field
        # marked type="tree" in the shapefile config, in @pos order.
        shpfields = shapefile.xpath('Fields/Field')
        builtid = ''
        for idx in range(0, len(shpfields)):
            idpart = shapefile.xpath('Fields/Field[@type="tree" and @pos=%d]' % idx)
            if len(idpart) > 0:
                idpart = idpart[0]
                part = feature.get(idpart.get('name'))
                # strip any spaces in the treecode
                if not (isinstance(part, types.StringTypes)):
                    part = '%d' % part
                part = part.strip(' ')
                width = int(idpart.get('width'))
                builtid = '%s%s' % (builtid, part.zfill(width))
        return builtid

    def get_shape_portable(shapefile, feature):
        # Value of the type="portable" field, coerced to a string.
        field = shapefile.xpath('Fields/Field[@type="portable"]')[0]
        portable = feature.get(field.get('name'))
        if not (isinstance(portable, types.StringTypes)):
            portable = '%d' % portable
        return portable

    def get_shape_name(shapefile, feature):
        # Value of the type="name" field; byte strings are assumed latin-1.
        field = shapefile.xpath('Fields/Field[@type="name"]')[0]
        strname = feature.get(field.get('name'))
        if type(strname) == str:
            return strname.decode('latin-1')
        else:
            return str(strname)

    for h, shapefile in enumerate(config['shapefiles']):
        if not exists(shapefile.get('path')):
            logger.info("""
ERROR:

The filename specified by the configuration:

%s

Could not be found. Please check the configuration and try again.
""", shapefile.get('path'))
            raise IOError('Cannot find the file "%s"' % shapefile.get('path'))

        ds = DataSource(shapefile.get('path'))
        logger.debug('Importing from %s, %d of %d shapefiles...', ds, h+1, len(config['shapefiles']))
        lyr = ds[0]
        logger.debug('%d objects in shapefile', len(lyr))
        level = Geolevel.objects.get(name=config['geolevel'].lower()[:50])

        # Create the subjects we need
        subject_objects = {}
        for sconfig in config['subject_fields']:
            attr_name = sconfig.get('field')
            foundalias = False
            for elem in sconfig.getchildren():
                if elem.tag == 'Subject':
                    foundalias = True
                    sub = Subject.objects.get(name=elem.get('id').lower()[:50])
            if not foundalias:
                sub = Subject.objects.get(name=sconfig.get('id').lower()[:50])
            # Map both shapefile attribute -> Subject and subject name -> attribute.
            subject_objects[attr_name] = sub
            subject_objects['%s_by_id' % sub.name] = attr_name

        progress = 0.0
        logger.debug('0% .. ')
        for i, feat in enumerate(lyr):
            # Coarse progress logging in 10% increments.
            if (float(i) / len(lyr)) > (progress + 0.1):
                progress += 0.1
                logger.debug('%2.0f%% .. ', progress * 100)
            levels = [level]
            for region, filter_list in config['region_filters'].iteritems():
                # Check for applicability of the function by examining the config
                geolevel_xpath = '/DistrictBuilder/GeoLevels/GeoLevel[@name="%s"]' % config['geolevel']
                geolevel_config = store.data.xpath(geolevel_xpath)
                geolevel_region_xpath = '/DistrictBuilder/Regions/Region[@name="%s"]/GeoLevels//GeoLevel[@ref="%s"]' % (region, geolevel_config[0].get('id'))
                if len(store.data.xpath(geolevel_region_xpath)) > 0:
                    # If the geolevel is in the region, check the filters
                    for f in filter_list:
                        if f(feat) == True:
                            levels.append(Geolevel.objects.get(name='%s_%s' % (region, level.name)))
            # Skip geometry creation when this geounit was already imported.
            prefetch = Geounit.objects.filter(
                Q(name=get_shape_name(shapefile, feat)),
                Q(geolevel__in=levels),
                Q(portable_id=get_shape_portable(shapefile, feat)),
                Q(tree_code=get_shape_tree(shapefile, feat))
            )
            if prefetch.count() == 0:
                try:
                    # Store the geos geometry
                    # Buffer by 0 to get rid of any self-intersections which may make this geometry invalid.
                    geos = feat.geom.geos.buffer(0)
                    # Coerce the geometry into a MultiPolygon
                    if geos.geom_type == 'MultiPolygon':
                        my_geom = geos
                    elif geos.geom_type == 'Polygon':
                        my_geom = MultiPolygon(geos)
                    simple = my_geom.simplify(tolerance=Decimal(config['tolerance']), preserve_topology=True)
                    if simple.geom_type != 'MultiPolygon':
                        simple = MultiPolygon(simple)
                    center = my_geom.centroid
                    geos = None
                    # Ensure the centroid is within the geometry
                    if not center.within(my_geom):
                        # Get the first polygon in the multipolygon
                        first_poly = my_geom[0]
                        # Get the extent of the first poly
                        first_poly_extent = first_poly.extent
                        min_x = first_poly_extent[0]
                        max_x = first_poly_extent[2]
                        # Create a line through the bbox and the poly center
                        my_y = first_poly.centroid.y
                        centerline = LineString( (min_x, my_y), (max_x, my_y))
                        # Get the intersection of that line and the poly
                        intersection = centerline.intersection(first_poly)
                        if type(intersection) is MultiLineString:
                            intersection = intersection[0]
                        # the center of that line is my within-the-poly centroid.
                        center = intersection.centroid
                        first_poly = first_poly_extent = min_x = max_x = my_y = centerline = intersection = None
                    g = Geounit(geom = my_geom,
                        name = get_shape_name(shapefile, feat),
                        simple = simple,
                        center = center,
                        portable_id = get_shape_portable(shapefile, feat),
                        tree_code = get_shape_tree(shapefile, feat)
                    )
                    g.save()
                    g.geolevel = levels
                    g.save()
                except:
                    # NOTE(review): bare except also swallows
                    # KeyboardInterrupt/SystemExit; consider `except Exception:`.
                    logger.info('Failed to import geometry for feature %d', feat.fid)
                    logger.debug(traceback.format_exc())
                    continue
            else:
                g = prefetch[0]
                g.geolevel = levels
                g.save()
            if not config['attributes']:
                self.set_geounit_characteristic(g, subject_objects, feat)
        logger.info('100%')

    if config['attributes']:
        progress = 0
        logger.info("Assigning subject values to imported geography...")
        logger.info('0% .. ')
        for h, attrconfig in enumerate(config['attributes']):
            if not exists(attrconfig.get('path')):
                logger.info("""
ERROR:

The filename specified by the configuration:

%s

Could not be found. Please check the configuration and try again.
""", attrconfig.get('path'))
                raise IOError('Cannot find the file "%s"' % attrconfig.get('path'))

            lyr = DataSource(attrconfig.get('path'))[0]
            found = 0
            missed = 0
            for i, feat in enumerate(lyr):
                if (float(i) / len(lyr)) > (progress + 0.1):
                    progress += 0.1
                    logger.info('%2.0f%% .. ', progress * 100)
                # NOTE(review): get_shape_treeid is not defined in this
                # method (only get_shape_tree is); presumably a module-level
                # helper -- verify it exists, otherwise this path raises
                # NameError when 'attributes' is configured.
                gid = get_shape_treeid(attrconfig, feat)
                g = Geounit.objects.filter(tree_code=gid)
                if g.count() > 0:
                    self.set_geounit_characteristic(g[0], subject_objects, feat)
            logger.info('100%')
0
Example 27
Project: Django--an-app-at-a-time Source File: writer.py
@classmethod
def serialize(cls, value):
"""
Serializes the value to a string that's parsable by Python, along
with any needed imports to make that string work.
More advanced than repr() as it can encode things
like datetime.datetime.now.
"""
# FIXME: Ideally Promise would be reconstructible, but for now we
# use force_text on them and defer to the normal string serialization
# process.
if isinstance(value, Promise):
value = force_text(value)
# Sequences
if isinstance(value, (list, set, tuple)):
imports = set()
strings = []
for item in value:
item_string, item_imports = cls.serialize(item)
imports.update(item_imports)
strings.append(item_string)
if isinstance(value, set):
# Don't use the literal "{%s}" as it doesn't support empty set
format = "set([%s])"
elif isinstance(value, tuple):
# When len(value)==0, the empty tuple should be serialized as
# "()", not "(,)" because (,) is invalid Python syntax.
format = "(%s)" if len(value) != 1 else "(%s,)"
else:
format = "[%s]"
return format % (", ".join(strings)), imports
# Dictionaries
elif isinstance(value, dict):
imports = set()
strings = []
for k, v in value.items():
k_string, k_imports = cls.serialize(k)
v_string, v_imports = cls.serialize(v)
imports.update(k_imports)
imports.update(v_imports)
strings.append((k_string, v_string))
return "{%s}" % (", ".join("%s: %s" % (k, v) for k, v in strings)), imports
# Datetimes
elif isinstance(value, datetime.datetime):
value_repr = cls.serialize_datetime(value)
imports = ["import datetime"]
if value.tzinfo is not None:
imports.append("from django.utils.timezone import utc")
return value_repr, set(imports)
# Dates
elif isinstance(value, datetime.date):
value_repr = repr(value)
if isinstance(value, datetime_safe.date):
value_repr = "datetime.%s" % value_repr
return value_repr, {"import datetime"}
# Times
elif isinstance(value, datetime.time):
value_repr = repr(value)
if isinstance(value, datetime_safe.time):
value_repr = "datetime.%s" % value_repr
return value_repr, {"import datetime"}
# Settings references
elif isinstance(value, SettingsReference):
return "settings.%s" % value.setting_name, {"from django.conf import settings"}
# Simple types
elif isinstance(value, float):
if math.isnan(value) or math.isinf(value):
return 'float("{}")'.format(value), set()
return repr(value), set()
elif isinstance(value, six.integer_types + (bool, type(None))):
return repr(value), set()
elif isinstance(value, six.binary_type):
value_repr = repr(value)
if six.PY2:
# Prepend the `b` prefix since we're importing unicode_literals
value_repr = 'b' + value_repr
return value_repr, set()
elif isinstance(value, six.text_type):
value_repr = repr(value)
if six.PY2:
# Strip the `u` prefix since we're importing unicode_literals
value_repr = value_repr[1:]
return value_repr, set()
# Decimal
elif isinstance(value, decimal.Decimal):
return repr(value), {"from decimal import Decimal"}
# Django fields
elif isinstance(value, models.Field):
attr_name, path, args, kwargs = value.deconstruct()
return cls.serialize_deconstructed(path, args, kwargs)
# Classes
elif isinstance(value, type):
special_cases = [
(models.Model, "models.Model", []),
]
for case, string, imports in special_cases:
if case is value:
return string, set(imports)
if hasattr(value, "__module__"):
module = value.__module__
if module == six.moves.builtins.__name__:
return value.__name__, set()
else:
return "%s.%s" % (module, value.__name__), {"import %s" % module}
elif isinstance(value, models.manager.BaseManager):
as_manager, manager_path, qs_path, args, kwargs = value.deconstruct()
if as_manager:
name, imports = cls._serialize_path(qs_path)
return "%s.as_manager()" % name, imports
else:
return cls.serialize_deconstructed(manager_path, args, kwargs)
# Anything that knows how to deconstruct itself.
elif hasattr(value, 'deconstruct'):
return cls.serialize_deconstructed(*value.deconstruct())
# Functions
elif isinstance(value, (types.FunctionType, types.BuiltinFunctionType)):
# @classmethod?
if getattr(value, "__self__", None) and isinstance(value.__self__, type):
klass = value.__self__
module = klass.__module__
return "%s.%s.%s" % (module, klass.__name__, value.__name__), {"import %s" % module}
# Further error checking
if value.__name__ == '<lambda>':
raise ValueError("Cannot serialize function: lambda")
if value.__module__ is None:
raise ValueError("Cannot serialize function %r: No module" % value)
# Python 3 is a lot easier, and only uses this branch if it's not local.
if getattr(value, "__qualname__", None) and getattr(value, "__module__", None):
if "<" not in value.__qualname__: # Qualname can include <locals>
return "%s.%s" % (value.__module__, value.__qualname__), {"import %s" % value.__module__}
# Python 2/fallback version
module_name = value.__module__
# Make sure it's actually there and not an unbound method
module = import_module(module_name)
if not hasattr(module, value.__name__):
raise ValueError(
"Could not find function %s in %s.\n"
"Please note that due to Python 2 limitations, you cannot "
"serialize unbound method functions (e.g. a method "
"declared and used in the same class body). Please move "
"the function into the main module body to use migrations.\n"
"For more information, see "
"https://docs.djangoproject.com/en/%s/topics/migrations/#serializing-values"
% (value.__name__, module_name, get_docs_version()))
return "%s.%s" % (module_name, value.__name__), {"import %s" % module_name}
# Other iterables
elif isinstance(value, collections.Iterable):
imports = set()
strings = []
for item in value:
item_string, item_imports = cls.serialize(item)
imports.update(item_imports)
strings.append(item_string)
# When len(strings)==0, the empty iterable should be serialized as
# "()", not "(,)" because (,) is invalid Python syntax.
format = "(%s)" if len(strings) != 1 else "(%s,)"
return format % (", ".join(strings)), imports
# Compiled regex
elif isinstance(value, COMPILED_REGEX_TYPE):
imports = {"import re"}
regex_pattern, pattern_imports = cls.serialize(value.pattern)
regex_flags, flag_imports = cls.serialize(value.flags)
imports.update(pattern_imports)
imports.update(flag_imports)
args = [regex_pattern]
if value.flags:
args.append(regex_flags)
return "re.compile(%s)" % ', '.join(args), imports
# Uh oh.
else:
raise ValueError(
"Cannot serialize: %r\nThere are some values Django cannot serialize into "
"migration files.\nFor more, see https://docs.djangoproject.com/en/%s/"
"topics/migrations/#migration-serializing" % (value, get_docs_version())
)
0 votes

Example 28 — Project: pyactiveresource, Source File: util.py
def xml_to_dict(xmlobj, saveroot=True):
    """Parse the xml into a dictionary of attributes.

    Recursively converts an ElementTree element (or a raw xml string)
    into plain Python values.  The element's ``type`` attribute drives
    the conversion (array/integer/datetime/date/decimal/double/float/
    boolean/yaml/base64binary/file/symbol/string); elements with
    children become nested dict-like objects.

    Args:
        xmlobj: An ElementTree element or an xml string.
        saveroot: Keep the xml element names (ugly format)
    Returns:
        An ElementDict object or ElementList for multiple objects
    """
    if isinstance(xmlobj, (six.text_type, six.binary_type)):
        # Allow for blank (usually HEAD) result on success
        if xmlobj.isspace():
            return {}
        try:
            element = ET.fromstring(xmlobj)
        except Exception as err:
            raise Error('Unable to parse xml data: %s' % err)
    else:
        # Already a parsed Element; use it as-is.
        element = xmlobj
    element_type = element.get('type', '').lower()
    if element_type == 'array':
        # Collection of child objects -> ElementList (tag normalized to
        # underscore form).
        element_list_type = element.tag.replace('-', '_')
        return_list = element_containers.ElementList(element_list_type)
        for child in element.getchildren():
            return_list.append(xml_to_dict(child, saveroot=False))
        if saveroot:
            return element_containers.ElementDict(element_list_type,
                                                  {element_list_type:
                                                   return_list})
        else:
            return return_list
    elif element.get('nil') == 'true':
        # Explicit xml nil marker.
        return None
    elif element_type in ('integer', 'datetime', 'date',
                          'decimal', 'double', 'float') and not element.text:
        # Typed but empty scalar -> None instead of a conversion error.
        return None
    elif element_type == 'integer':
        return int(element.text)
    elif element_type == 'datetime':
        if date_parse:
            # dateutil is available; it handles arbitrary ISO8601 forms.
            return date_parse(element.text)
        else:
            try:
                # Fallback only understands the fixed '+0000' UTC suffix.
                timestamp = calendar.timegm(
                    time.strptime(element.text, '%Y-%m-%dT%H:%M:%S+0000'))
                return datetime.datetime.utcfromtimestamp(timestamp)
            except ValueError as err:
                raise Error('Unable to parse timestamp. Install dateutil'
                            ' (http://labix.org/python-dateutil) or'
                            ' pyxml (http://pyxml.sf.net/topics/)'
                            ' for ISO8601 support.')
    elif element_type == 'date':
        time_tuple = time.strptime(element.text, '%Y-%m-%d')
        return datetime.date(*time_tuple[:3])
    elif element_type == 'decimal':
        return decimal.Decimal(element.text)
    elif element_type in ('float', 'double'):
        return float(element.text)
    elif element_type == 'boolean':
        if not element.text:
            return False
        return element.text.strip() in ('true', '1')
    elif element_type == 'yaml':
        if not yaml:
            raise ImportError('PyYaml is not installed: http://pyyaml.org/')
        return yaml.safe_load(element.text)
    elif element_type == 'base64binary':
        return base64.decodestring(element.text.encode('ascii'))
    elif element_type == 'file':
        # Binary attachment: keep content plus metadata in a FileObject.
        content_type = element.get('content_type',
                                   'application/octet-stream')
        filename = element.get('name', 'untitled')
        return FileObject(element.text, filename, content_type)
    elif element_type in ('symbol', 'string'):
        if not element.text:
            return ''
        return element.text
    elif element.getchildren():
        # This is an element with children. The children might be simple
        # values, or nested hashes.
        if element_type:
            attributes = element_containers.ElementDict(
                underscore(element.get('type', '')), element.items())
        else:
            attributes = element_containers.ElementDict(singularize(
                element.tag.replace('-', '_')), element.items())
        for child in element.getchildren():
            attribute = xml_to_dict(child, saveroot=False)
            child_tag = child.tag.replace('-', '_')
            # Handle multiple elements with the same tag name
            if child_tag in attributes:
                if isinstance(attributes[child_tag], list):
                    attributes[child_tag].append(attribute)
                else:
                    # Second occurrence: promote the value to a list.
                    attributes[child_tag] = [attributes[child_tag],
                                             attribute]
            else:
                attributes[child_tag] = attribute
        if saveroot:
            return {element.tag.replace('-', '_'): attributes}
        else:
            return attributes
    elif element.items():
        # Leaf with xml attributes but no children: keep the attributes.
        return element_containers.ElementDict(element.tag.replace('-', '_'),
                                              element.items())
    else:
        # Plain leaf: return the raw text (may be None).
        return element.text
0 votes

Example 29 — Project: pyafipws, Source File: pyrece.py
def on_btnAutorizarLote_click(self, event):
    """Authorize the selected invoices as a single batch (lote).

    Aggregates the selected grid rows into one request: header fields
    must be identical across all rows (otherwise RuntimeError), the
    monetary fields are summed as Decimal, the batch is submitted to the
    AFIP webservice, and the result (CAE, dates, observations, errors)
    is written back onto every selected row.
    """
    self.verifica_ws()
    if not self.items: return
    try:
        #getcontext().prec = 2
        ok = 0
        rechazadas = 0
        cols = self.cols
        items = []
        self.progreso(0)
        cbt_desde = cbt_hasta = None
        # Header fields that must be identical on every selected row.
        datos = {
            'tipo_cbte': None,
            'punto_vta': None,
            'fecha_cbte': None,
            'fecha_venc_pago': None,
            'fecha_cbte': None,
            'fecha_venc_pago': None,
            'fecha_serv_desde': None,
            'fecha_serv_hasta': None,
            'moneda_id': None,
            'moneda_ctz': None,
            'id': None,
        }
        # Monetary totals accumulated across the batch (exact Decimal).
        importes = {
            'imp_total': Decimal(0),
            'imp_tot_conc': Decimal(0),
            'imp_neto': Decimal(0),
            'imp_iva': Decimal(0),
            'imp_op_ex': Decimal(0),
            'imp_trib': Decimal(0),
        }
        # Pre-create up to 4 VAT (IVA) slots and 9 tax (tributo) slots.
        for l in range(1, 5):
            k = 'iva_%%s_%s' % l
            datos[k % 'id'] = None
            importes[k % 'base_imp'] = Decimal(0)
            importes[k % 'importe'] = Decimal(0)
        for l in range(1, 10):
            k = 'tributo_%%s_%s' % l
            datos[k % 'id'] = None
            datos[k % 'desc'] = None
            importes[k % 'base_imp'] = Decimal(0)
            datos[k % 'alic'] = None
            importes[k % 'importe'] = Decimal(0)
        for i, item in self.get_selected_items():
            # Track the lowest/highest invoice number in the batch.
            if cbt_desde is None or int(item['cbt_numero']) < cbt_desde:
                cbt_desde = int(item['cbt_numero'])
            if cbt_hasta is None or int(item['cbt_numero']) > cbt_hasta:
                cbt_hasta = int(item['cbt_numero'])
            for key in item:
                if key in datos:
                    if datos[key] is None:
                        datos[key] = item[key]
                    elif datos[key] != item[key]:
                        # Header fields may not differ within one batch.
                        raise RuntimeError(u"%s tiene valores distintos en el lote!" % key)
                if key in importes and item[key]:
                    # Normalize decimal comma, then accumulate with 2 decimals.
                    importes[key] = importes[key] + Decimal("%.2f" % float(str(item[key].replace(",", "."))))
        kargs = {'cbt_desde': cbt_desde, 'cbt_hasta': cbt_hasta}
        kargs.update({'tipo_doc': 99, 'nro_doc': '0'})
        kargs.update(datos)
        kargs.update(importes)
        if kargs['fecha_serv_desde'] and kargs['fecha_serv_hasta']:
            # Services: concept 2 keeps the service period fields.
            kargs['presta_serv'] = 1
            kargs['concepto'] = 2
        else:
            # Goods: drop the (empty) service period fields.
            kargs['presta_serv'] = 0
            kargs['concepto'] = 1
            del kargs['fecha_serv_desde']
            del kargs['fecha_serv_hasta']
        for key, val in importes.items():
            # Truncate (not round) every total to 2 decimal places.
            importes[key] = val.quantize(Decimal('.01'), rounding=ROUND_DOWN)
        if 'id' not in kargs or kargs['id'] == "":
            # Synthesize a unique request id from type, point of sale and
            # invoice number (number in the low 8 digits).
            id = long(kargs['cbt_desde'])
            id += (int(kargs['tipo_cbte'])*10**4 + int(kargs['punto_vta']))*10**8
            kargs['id'] = id
        if DEBUG:
            self.log('\n'.join(["%s='%s'" % (k,v) for k,v in kargs.items()]))
        if '--test' in sys.argv:
            # Fixed values for the homologation/testing environment.
            kargs['cbt_desde'] = 777
            kargs['fecha_cbte'] = '20110802'
            kargs['fecha_venc_pago'] = '20110831'
        if gui.confirm("Confirma Lote:\n"
                       "Tipo: %(tipo_cbte)s Desde: %(cbt_desde)s Hasta %(cbt_hasta)s\n"
                       "Neto: %(imp_neto)s IVA: %(imp_iva)s Trib.: %(imp_trib)s Total: %(imp_total)s"
                       % kargs, "Autorizar lote:"):
            if self.webservice == 'wsfev1':
                encabezado = {}
                for k in ('concepto', 'tipo_doc', 'nro_doc', 'tipo_cbte', 'punto_vta',
                          'cbt_desde', 'cbt_hasta', 'imp_total', 'imp_tot_conc', 'imp_neto',
                          'imp_iva', 'imp_trib', 'imp_op_ex', 'fecha_cbte',
                          'moneda_id', 'moneda_ctz'):
                    encabezado[k] = kargs[k]
                for k in ('fecha_venc_pago', 'fecha_serv_desde', 'fecha_serv_hasta'):
                    if k in kargs:
                        encabezado[k] = kargs.get(k)
                self.ws.CrearFactura(**encabezado)
            # Attach each populated VAT subtotal; stop at the first gap.
            for l in range(1, 1000):
                k = 'iva_%%s_%s' % l
                if (k % 'id') in kargs:
                    id = kargs[k % 'id']
                    base_imp = kargs[k % 'base_imp']
                    importe = kargs[k % 'importe']
                    if id:
                        self.ws.AgregarIva(id, base_imp, importe)
                else:
                    break
            # Attach each populated additional tax; stop at the first gap.
            for l in range(1, 1000):
                k = 'tributo_%%s_%s' % l
                if (k % 'id') in kargs:
                    id = kargs[k % 'id']
                    desc = kargs[k % 'desc']
                    base_imp = kargs[k % 'base_imp']
                    alic = kargs[k % 'alic']
                    importe = kargs[k % 'importe']
                    if id:
                        self.ws.AgregarTributo(id, desc, base_imp, alic, importe)
                else:
                    break
            if DEBUG:
                self.log('\n'.join(["%s='%s'" % (k,v) for k,v in self.ws.factura.items()]))
            # Request the CAE (authorization code) from AFIP.
            cae = self.ws.CAESolicitar()
            kargs.update({
                'cae': self.ws.CAE,
                'fecha_vto': self.ws.Vencimiento,
                'resultado': self.ws.Resultado,
                'motivo': self.ws.Obs,
                'reproceso': self.ws.Reproceso,
                'err_code': self.ws.ErrCode.encode("latin1"),
                'err_msg': self.ws.ErrMsg.encode("latin1"),
            })
            if self.ws.ErrMsg:
                gui.alert(self.ws.ErrMsg, "Error AFIP")
            if self.ws.Obs and self.ws.Obs != '00':
                gui.alert(self.ws.Obs, u"Observación AFIP")
            # Copy the webservice result back onto every selected row.
            for i, item in self.get_selected_items():
                for key in ('id', 'cae', 'fecha_vto', 'resultado', 'motivo', 'reproceso', 'err_code', 'err_msg'):
                    item[key] = kargs[key] if kargs[key] is not None else ""
                self.items[i] = item
            self.log("ID: %s CAE: %s Motivo: %s Reproceso: %s" % (kargs['id'], kargs['cae'], kargs['motivo'], kargs['reproceso']))
            if kargs['resultado'] == "R":
                rechazadas += 1
            elif kargs['resultado'] == "A":
                ok += 1
        self.items = self.items  # refresh the grid; TODO: find a cleaner way
        self.progreso(len(self.items))
        gui.alert('Proceso finalizado OK!\n\nAceptadas: %d\nRechazadas: %d' % (ok, rechazadas), 'Autorización')
        self.grabar()
    except SoapFault, e:
        # SOAP-level failure: log the raw request/response for diagnosis.
        self.log(self.client.xml_request)
        self.log(self.client.xml_response)
        self.error(e.faultcode, e.faultstring.encode("ascii", "ignore"))
    except Exception, e:
        self.error(u'Excepción', unicode(e))
0 votes

Example 30 — Project: SickGear, Source File: encoder.py
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
        _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        _use_decimal, _namedtuple_as_object, _tuple_as_array,
        _int_as_string_bitcount, _item_sort_key,
        _encoding, _for_json,
        _iterable_as_array,
        ## HACK: hand-optimized bytecode; turn globals into locals
        _PY3=PY3,
        ValueError=ValueError,
        string_types=string_types,
        Decimal=None,
        dict=dict,
        float=float,
        id=id,
        integer_types=integer_types,
        isinstance=isinstance,
        list=list,
        str=str,
        tuple=tuple,
        iter=iter,
    ):
    """Build and return the pure-Python ``_iterencode(obj, level)`` generator.

    The closures below stream JSON text chunk by chunk.  The keyword
    arguments defaulted to builtins are a deliberate optimization:
    defaults are bound once at definition time, turning global lookups
    into fast local lookups inside the hot encoding loops.
    """
    # Resolve Decimal lazily so the module is only needed when enabled.
    if _use_decimal and Decimal is None:
        Decimal = decimal.Decimal
    if _item_sort_key and not callable(_item_sort_key):
        raise TypeError("item_sort_key must be None or callable")
    elif _sort_keys and not _item_sort_key:
        # sort_keys without a custom key sorts by the key itself.
        _item_sort_key = itemgetter(0)

    if (_int_as_string_bitcount is not None and
            (_int_as_string_bitcount <= 0 or
             not isinstance(_int_as_string_bitcount, integer_types))):
        raise TypeError("int_as_string_bitcount must be a positive integer")

    def _encode_int(value):
        # Integers outside +/- 2**bitcount are quoted as strings so
        # JavaScript consumers do not lose precision.
        skip_quoting = (
            _int_as_string_bitcount is None
            or
            _int_as_string_bitcount < 1
        )
        if type(value) not in integer_types:
            # See #118, do not trust custom str/repr
            value = int(value)
        if (
            skip_quoting or
            (-1 << _int_as_string_bitcount)
            < value <
            (1 << _int_as_string_bitcount)
        ):
            return str(value)
        return '"' + str(value) + '"'

    def _iterencode_list(lst, _current_indent_level):
        # Stream a JSON array.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            # Cycle detection: refuse to re-enter a list being encoded.
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                # After the first item the pending prefix is just the
                # separator (the '[' was emitted with item one).
                buf = separator
            if (isinstance(value, string_types) or
                    (_PY3 and isinstance(value, binary_type))):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, integer_types):
                yield buf + _encode_int(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            elif _use_decimal and isinstance(value, Decimal):
                yield buf + str(value)
            else:
                # Composite value: emit the prefix, then delegate.
                yield buf
                for_json = _for_json and getattr(value, 'for_json', None)
                if for_json and callable(for_json):
                    chunks = _iterencode(for_json(), _current_indent_level)
                elif isinstance(value, list):
                    chunks = _iterencode_list(value, _current_indent_level)
                else:
                    _asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
                    if _asdict and callable(_asdict):
                        chunks = _iterencode_dict(_asdict(),
                                                  _current_indent_level)
                    elif _tuple_as_array and isinstance(value, tuple):
                        chunks = _iterencode_list(value, _current_indent_level)
                    elif isinstance(value, dict):
                        chunks = _iterencode_dict(value, _current_indent_level)
                    else:
                        chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield ']'
        if markers is not None:
            del markers[markerid]

    def _stringify_key(key):
        # JSON object keys must be strings; coerce known scalar types,
        # return None to signal "drop this key" when _skipkeys is set.
        if isinstance(key, string_types):  # pragma: no cover
            pass
        elif isinstance(key, binary_type):
            key = key.decode(_encoding)
        elif isinstance(key, float):
            key = _floatstr(key)
        elif key is True:
            key = 'true'
        elif key is False:
            key = 'false'
        elif key is None:
            key = 'null'
        elif isinstance(key, integer_types):
            if type(key) not in integer_types:
                # See #118, do not trust custom str/repr
                key = int(key)
            key = str(key)
        elif _use_decimal and isinstance(key, Decimal):
            key = str(key)
        elif _skipkeys:
            key = None
        else:
            raise TypeError("key " + repr(key) + " is not a string")
        return key

    def _iterencode_dict(dct, _current_indent_level):
        # Stream a JSON object.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _PY3:
            iteritems = dct.items()
        else:
            iteritems = dct.iteritems()
        if _item_sort_key:
            # Sorting requires stringified keys up front.
            items = []
            for k, v in dct.items():
                if not isinstance(k, string_types):
                    k = _stringify_key(k)
                    if k is None:
                        continue
                items.append((k, v))
            items.sort(key=_item_sort_key)
        else:
            items = iteritems
        for key, value in items:
            if not (_item_sort_key or isinstance(key, string_types)):
                key = _stringify_key(key)
                if key is None:
                    # _skipkeys must be True
                    continue
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if (isinstance(value, string_types) or
                    (_PY3 and isinstance(value, binary_type))):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, integer_types):
                yield _encode_int(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            elif _use_decimal and isinstance(value, Decimal):
                yield str(value)
            else:
                for_json = _for_json and getattr(value, 'for_json', None)
                if for_json and callable(for_json):
                    chunks = _iterencode(for_json(), _current_indent_level)
                elif isinstance(value, list):
                    chunks = _iterencode_list(value, _current_indent_level)
                else:
                    _asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
                    if _asdict and callable(_asdict):
                        chunks = _iterencode_dict(_asdict(),
                                                  _current_indent_level)
                    elif _tuple_as_array and isinstance(value, tuple):
                        chunks = _iterencode_list(value, _current_indent_level)
                    elif isinstance(value, dict):
                        chunks = _iterencode_dict(value, _current_indent_level)
                    else:
                        chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield '}'
        if markers is not None:
            del markers[markerid]

    def _iterencode(o, _current_indent_level):
        # Top-level dispatch: scalars inline, containers delegated.
        if (isinstance(o, string_types) or
                (_PY3 and isinstance(o, binary_type))):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, integer_types):
            yield _encode_int(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        else:
            for_json = _for_json and getattr(o, 'for_json', None)
            if for_json and callable(for_json):
                for chunk in _iterencode(for_json(), _current_indent_level):
                    yield chunk
            elif isinstance(o, list):
                for chunk in _iterencode_list(o, _current_indent_level):
                    yield chunk
            else:
                _asdict = _namedtuple_as_object and getattr(o, '_asdict', None)
                if _asdict and callable(_asdict):
                    for chunk in _iterencode_dict(_asdict(),
                                                  _current_indent_level):
                        yield chunk
                elif (_tuple_as_array and isinstance(o, tuple)):
                    for chunk in _iterencode_list(o, _current_indent_level):
                        yield chunk
                elif isinstance(o, dict):
                    for chunk in _iterencode_dict(o, _current_indent_level):
                        yield chunk
                elif _use_decimal and isinstance(o, Decimal):
                    yield str(o)
                else:
                    while _iterable_as_array:
                        # Markers are not checked here because it is valid for
                        # an iterable to return self.
                        try:
                            o = iter(o)
                        except TypeError:
                            break
                        for chunk in _iterencode_list(o, _current_indent_level):
                            yield chunk
                        return
                    if markers is not None:
                        markerid = id(o)
                        if markerid in markers:
                            raise ValueError("Circular reference detected")
                        markers[markerid] = o
                    # Last resort: let the user-supplied default() convert.
                    o = _default(o)
                    for chunk in _iterencode(o, _current_indent_level):
                        yield chunk
                    if markers is not None:
                        del markers[markerid]
    return _iterencode
0 votes

Example 31 — Project: pretix, Source File: invoices.py
def _invoice_generate_german(invoice, f):
    """Render *invoice* as a German-layout PDF into file object *f*.

    Builds a ReportLab document with two page templates (the first page
    carries sender/recipient address blocks and invoice metadata), a
    line-item table, optional intro/payment/additional text paragraphs,
    and a summary table of included taxes grouped by tax rate.

    Args:
        invoice: The pretix Invoice model instance to render.
        f: A file-like object with a ``.name`` attribute to write to.
    Returns:
        The built BaseDocTemplate.
    """
    _invoice_register_fonts()
    styles = _invoice_get_stylesheet()
    pagesize = pagesizes.A4

    def on_page(canvas, doc):
        # Footer text and page number on every page after the first.
        canvas.saveState()
        canvas.setFont('OpenSans', 8)
        canvas.drawRightString(pagesize[0] - 20 * mm, 10 * mm, _("Page %d") % (doc.page,))
        for i, line in enumerate(invoice.footer_text.split('\n')[::-1]):
            canvas.drawCentredString(pagesize[0] / 2, 25 + (3.5 * i) * mm, line.strip())
        canvas.restoreState()

    def on_first_page(canvas, doc):
        # First page additionally draws the address blocks and the
        # invoice/cancellation metadata columns.
        canvas.setCreator('pretix.eu')
        canvas.setTitle(pgettext('invoice', 'Invoice {num}').format(num=invoice.number))
        canvas.saveState()
        canvas.setFont('OpenSans', 8)
        canvas.drawRightString(pagesize[0] - 20 * mm, 10 * mm, _("Page %d") % (doc.page,))
        for i, line in enumerate(invoice.footer_text.split('\n')[::-1]):
            canvas.drawCentredString(pagesize[0] / 2, 25 + (3.5 * i) * mm, line.strip())
        textobject = canvas.beginText(25 * mm, (297 - 15) * mm)
        textobject.setFont('OpenSansBd', 8)
        textobject.textLine(pgettext('invoice', 'Invoice from').upper())
        textobject.moveCursor(0, 5)
        textobject.setFont('OpenSans', 10)
        textobject.textLines(invoice.invoice_from.strip())
        canvas.drawText(textobject)
        textobject = canvas.beginText(25 * mm, (297 - 50) * mm)
        textobject.setFont('OpenSansBd', 8)
        textobject.textLine(pgettext('invoice', 'Invoice to').upper())
        textobject.moveCursor(0, 5)
        textobject.setFont('OpenSans', 10)
        textobject.textLines(invoice.invoice_to.strip())
        canvas.drawText(textobject)
        # Number column: cancellations show both their own number and
        # the number of the invoice they refer to.
        textobject = canvas.beginText(125 * mm, (297 - 50) * mm)
        textobject.setFont('OpenSansBd', 8)
        if invoice.is_cancellation:
            textobject.textLine(pgettext('invoice', 'Cancellation number').upper())
            textobject.moveCursor(0, 5)
            textobject.setFont('OpenSans', 10)
            textobject.textLine(invoice.number)
            textobject.moveCursor(0, 5)
            textobject.setFont('OpenSansBd', 8)
            textobject.textLine(pgettext('invoice', 'Original invoice').upper())
            textobject.moveCursor(0, 5)
            textobject.setFont('OpenSans', 10)
            textobject.textLine(invoice.refers.number)
        else:
            textobject.textLine(pgettext('invoice', 'Invoice number').upper())
            textobject.moveCursor(0, 5)
            textobject.setFont('OpenSans', 10)
            textobject.textLine(invoice.number)
        textobject.moveCursor(0, 5)
        if invoice.is_cancellation:
            textobject.setFont('OpenSansBd', 8)
            textobject.textLine(pgettext('invoice', 'Cancellation date').upper())
            textobject.moveCursor(0, 5)
            textobject.setFont('OpenSans', 10)
            textobject.textLine(date_format(invoice.date, "DATE_FORMAT"))
            textobject.moveCursor(0, 5)
            textobject.setFont('OpenSansBd', 8)
            textobject.textLine(pgettext('invoice', 'Original invoice date').upper())
            textobject.moveCursor(0, 5)
            textobject.setFont('OpenSans', 10)
            textobject.textLine(date_format(invoice.refers.date, "DATE_FORMAT"))
            textobject.moveCursor(0, 5)
        else:
            textobject.setFont('OpenSansBd', 8)
            textobject.textLine(pgettext('invoice', 'Invoice date').upper())
            textobject.moveCursor(0, 5)
            textobject.setFont('OpenSans', 10)
            textobject.textLine(date_format(invoice.date, "DATE_FORMAT"))
            textobject.moveCursor(0, 5)
        canvas.drawText(textobject)
        # Order column.
        textobject = canvas.beginText(165 * mm, (297 - 50) * mm)
        textobject.setFont('OpenSansBd', 8)
        textobject.textLine(_('Order code').upper())
        textobject.moveCursor(0, 5)
        textobject.setFont('OpenSans', 10)
        textobject.textLine(invoice.order.full_code)
        textobject.moveCursor(0, 5)
        textobject.setFont('OpenSansBd', 8)
        textobject.textLine(_('Order date').upper())
        textobject.moveCursor(0, 5)
        textobject.setFont('OpenSans', 10)
        textobject.textLine(date_format(invoice.order.datetime, "DATE_FORMAT"))
        canvas.drawText(textobject)
        # Event column.
        textobject = canvas.beginText(125 * mm, (297 - 15) * mm)
        textobject.setFont('OpenSansBd', 8)
        textobject.textLine(_('Event').upper())
        textobject.moveCursor(0, 5)
        textobject.setFont('OpenSans', 10)
        textobject.textLine(str(invoice.event.name))
        if invoice.event.settings.show_date_to:
            textobject.textLines(
                _('{from_date}\nuntil {to_date}').format(from_date=invoice.event.get_date_from_display(),
                                                         to_date=invoice.event.get_date_to_display()))
        else:
            textobject.textLine(invoice.event.get_date_from_display())
        canvas.drawText(textobject)
        canvas.restoreState()

    doc = BaseDocTemplate(f.name, pagesize=pagesizes.A4,
                          leftMargin=25 * mm, rightMargin=20 * mm,
                          topMargin=20 * mm, bottomMargin=15 * mm)
    # Reserve vertical space for the centered footer lines.
    footer_length = 3.5 * len(invoice.footer_text.split('\n')) * mm
    frames_p1 = [
        Frame(doc.leftMargin, doc.bottomMargin, doc.width, doc.height - 75 * mm,
              leftPadding=0, rightPadding=0, topPadding=0, bottomPadding=footer_length,
              id='normal')
    ]
    frames = [
        Frame(doc.leftMargin, doc.bottomMargin, doc.width, doc.height,
              leftPadding=0, rightPadding=0, topPadding=0, bottomPadding=footer_length,
              id='normal')
    ]
    doc.addPageTemplates([
        PageTemplate(id='FirstPage', frames=frames_p1, onPage=on_first_page, pagesize=pagesize),
        PageTemplate(id='OtherPages', frames=frames, onPage=on_page, pagesize=pagesize)
    ])
    story = [
        NextPageTemplate('FirstPage'),
        Paragraph(pgettext('invoice', 'Invoice')
                  if not invoice.is_cancellation
                  else pgettext('invoice', 'Cancellation'),
                  styles['Heading1']),
        Spacer(1, 5 * mm),
        NextPageTemplate('OtherPages'),
    ]
    if invoice.introductory_text:
        story.append(Paragraph(invoice.introductory_text, styles['Normal']))
        story.append(Spacer(1, 10 * mm))
    # Per-tax-rate accumulators for the tax summary at the bottom.
    taxvalue_map = defaultdict(Decimal)
    grossvalue_map = defaultdict(Decimal)
    tstyledata = [
        ('ALIGN', (1, 0), (-1, -1), 'RIGHT'),
        ('FONTNAME', (0, 0), (-1, 0), 'OpenSansBd'),
        ('FONTNAME', (0, -1), (-1, -1), 'OpenSansBd'),
        ('LEFTPADDING', (0, 0), (0, -1), 0),
        ('RIGHTPADDING', (-1, 0), (-1, -1), 0),
    ]
    tdata = [(
        pgettext('invoice', 'Description'),
        pgettext('invoice', 'Tax rate'),
        pgettext('invoice', 'Net'),
        pgettext('invoice', 'Gross'),
    )]
    total = Decimal('0.00')
    for line in invoice.lines.all():
        tdata.append((
            line.description,
            lformat("%.2f", line.tax_rate) + " %",
            lformat("%.2f", line.net_value) + " " + invoice.event.currency,
            lformat("%.2f", line.gross_value) + " " + invoice.event.currency,
        ))
        taxvalue_map[line.tax_rate] += line.tax_value
        grossvalue_map[line.tax_rate] += line.gross_value
        total += line.gross_value
    tdata.append([pgettext('invoice', 'Invoice total'), '', '', lformat("%.2f", total) + " " + invoice.event.currency])
    colwidths = [a * doc.width for a in (.55, .15, .15, .15)]
    table = Table(tdata, colWidths=colwidths, repeatRows=1)
    table.setStyle(TableStyle(tstyledata))
    story.append(table)
    story.append(Spacer(1, 15 * mm))
    if invoice.payment_provider_text:
        story.append(Paragraph(invoice.payment_provider_text, styles['Normal']))
    if invoice.additional_text:
        story.append(Paragraph(invoice.additional_text, styles['Normal']))
        story.append(Spacer(1, 15 * mm))
    tstyledata = [
        ('SPAN', (1, 0), (-1, 0)),
        ('ALIGN', (2, 1), (-1, -1), 'RIGHT'),
        ('LEFTPADDING', (0, 0), (0, -1), 0),
        ('RIGHTPADDING', (-1, 0), (-1, -1), 0),
        ('FONTSIZE', (0, 0), (-1, -1), 8),
    ]
    tdata = [('', pgettext('invoice', 'Included taxes'), '', '', ''),
             ('', pgettext('invoice', 'Tax rate'),
              pgettext('invoice', 'Net value'), pgettext('invoice', 'Gross value'), pgettext('invoice', 'Tax'))]
    for rate, gross in grossvalue_map.items():
        # Skip 0% groups -- they carry no tax to summarize.
        # (Bug fix: this previously tested `line.tax_rate`, i.e. the
        # *last* invoice line left over from the loop above, instead of
        # the rate of the group being summarized.)
        if rate == 0:
            continue
        tax = taxvalue_map[rate]
        tdata.append((
            '',
            lformat("%.2f", rate) + " %",
            lformat("%.2f", (gross - tax)) + " " + invoice.event.currency,
            lformat("%.2f", gross) + " " + invoice.event.currency,
            lformat("%.2f", tax) + " " + invoice.event.currency,
        ))
    # Only render the summary table if at least one taxed group exists.
    if len(tdata) > 2:
        colwidths = [a * doc.width for a in (.45, .10, .15, .15, .15)]
        table = Table(tdata, colWidths=colwidths, repeatRows=2)
        table.setStyle(TableStyle(tstyledata))
        story.append(table)
    doc.build(story)
    return doc
0 votes

Example 32
def create_app(mode, configs=None, log_level=None, **kwargs):
    """Flask application factory for SimpleCoin.

    Layers configuration from defaults.toml, SIMPLECOIN_CONFIG*
    environment variables and any explicitly passed config files, then
    wires up logging, redis/cache/db connections and mode-specific
    behaviour.

    Args:
        mode: One of "manage", "webserver" or "scheduler"; selects log
            files, route registration and scheduler startup.
        configs: Optional list of config file paths or open file objects.
        log_level: Optional logging level name overriding the config.
    Returns:
        The configured Flask application.
    """
    # Allow configuration information to be specified with enviroment vars
    env_configs = {}
    for key in os.environ:
        if key.startswith('SIMPLECOIN_CONFIG'):
            env_configs[key] = os.environ[key]
    # Sort by variable name so the layering order is deterministic.
    env_configs = [env_configs[value] for value in sorted(env_configs)]
    configs = ['defaults.toml'] + (env_configs or []) + (configs or [])
    if len(configs) == 1:
        # Only defaults.toml present: refuse to start.
        print("Unable to start with only the default config values! {}"
              .format(configs))
        exit(2)
    # Later config files override earlier ones via a recursive merge.
    config_vars = {}
    for config in configs:
        if isinstance(config, basestring):
            if os.path.isabs(config):
                config_path = config
            else:
                config_path = os.path.join(root, config)
            config = open(config_path)
        updates = toml.loads(config.read())
        toml.toml_merge_dict(config_vars, updates)
    # Initialize our flask application
    # =======================================================================
    app = Flask(__name__, static_folder='../static', static_url_path='/static')
    app.jinja_loader = FileSystemLoader(os.path.join(root, 'templates'))
    # Objectizes all configurations
    # =======================================================================
    ConfigChecker(config_vars, app)
    # Setup logging
    # =======================================================================
    # Drop Flask's default handler; a root handler is installed below.
    del app.logger.handlers[0]
    app.logger.setLevel(logging.NOTSET)
    log_format = logging.Formatter('%(asctime)s [%(name)s] [%(levelname)s]: %(message)s')
    # An explicit log_level argument wins over the configured value.
    log_level = getattr(logging, str(log_level), app.config['log_level'])
    logger = logging.getLogger()
    logger.setLevel(log_level)
    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setFormatter(log_format)
    logger.addHandler(handler)
    # Handle optionally adding log file writers for each different run mode
    # =======================================================================
    if mode == "manage" and app.config['manage_log_file']:
        hdlr = logging.FileHandler(app.config['manage_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "scheduler" and app.config['scheduler_log_file']:
        hdlr = logging.FileHandler(app.config['scheduler_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "webserver" and app.config['webserver_log_file']:
        hdlr = logging.FileHandler(app.config['webserver_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    # Quiet down chatty third-party loggers.
    logging.getLogger("gunicorn.access").setLevel(logging.WARN)
    logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(logging.INFO)
    # Add the debug toolbar if we're in debug mode
    # =======================================================================
    if app.config['DEBUG'] and mode == "webserver":
        # Log all stdout and stderr when in debug mode for convenience
        class LoggerWriter:
            # File-like shim forwarding write() calls to a logger.
            def __init__(self, logger, level):
                self.logger = logger
                self.level = level

            def write(self, message):
                if message != '\n':
                    self.logger.log(self.level, message)
        sys.stdout = LoggerWriter(app.logger, logging.DEBUG)
        sys.stderr = LoggerWriter(app.logger, logging.DEBUG)
    # Register the powerpool datastore + Cache
    # =======================================================================
    db.init_app(app)
    babel.init_app(app)
    app.config['BABEL_DEFAULT_LOCALE'] = app.config.get('default_locale')

    def configure_redis(config):
        # The 'mock_redis' type swaps in an in-memory fake for testing.
        typ = config.pop('type')
        if typ == "mock_redis":
            from mockredis import mock_redis_client
            return mock_redis_client()
        return Redis(**config)
    cache_config = app.config.get('main_cache', dict(type='live'))
    cache_redis = configure_redis(cache_config)
    ds_config = app.config.get('redis_conn', dict(type='live'))
    ds_redis = configure_redis(ds_config)
    # Take advantage of the fact that werkzeug lets the host kwargs be a Redis
    # compatible object
    cache.init_app(app, config=dict(CACHE_TYPE='redis', CACHE_REDIS_HOST=cache_redis))
    app.redis = ds_redis
    sentry = False
    if app.config.get('sentry'):
        # Sentry is optional; keep running if raven is not installed.
        try:
            from raven.contrib.flask import Sentry
            sentry = Sentry()
        except Exception:
            app.logger.error("Unable to initialize sentry!")
    # Helpful global vars
    # =======================================================================
    app.SATOSHI = Decimal('0.00000001')
    app.MAX_DECIMALS = 28
    # Configure app for running manage.py functions
    # =======================================================================
    if mode == "manage" or mode == "webserver":
        # Dynamically add all the filters in the filters.py file
        for name, func in inspect.getmembers(filters, inspect.isfunction):
            app.jinja_env.filters[name] = func
    if mode == "manage":
        # Initialize the migration settings
        Migrate(app, db)
        # Disable for management mode
        if sentry:
            sentry = False
    # Configure app for serving web content
    # =======================================================================
    elif mode == "webserver":
        # try and fetch the git version information
        try:
            output = subprocess.check_output("git show -s --format='%ci %h'",
                                             shell=True).strip().rsplit(" ", 1)
            app.config['hash'] = output[1]
            app.config['revdate'] = output[0]
        # celery won't work with this, so set some default
        except Exception:
            app.config['hash'] = ''
            app.config['revdate'] = ''
        app.logger.info("Starting up SimpleCoin!\n{}".format("=" * 100))
    # Configure app for running scheduler.py functions + instantiate scheduler
    # =======================================================================
    elif mode == "scheduler":
        if sentry and 'SENTRY_NAME' in app.config:
            app.config['SENTRY_NAME'] = app.config['SENTRY_NAME'] + "_scheduler"
        app.logger.info("=" * 80)
        app.logger.info("SimpleCoin cron scheduler starting up...")
        setproctitle.setproctitle("simplecoin_scheduler")
        sched = Scheduler(standalone=True)
        # monkey patch the scheduler to wrap each job call in its own flask
        # context. Kind of sloppy way to pass in the app context...
        Scheduler.app = app
        Scheduler._old_run_job = Scheduler._run_job

        def _run_job(self, *args, **kwargs):
            with self.app.app_context():
                Scheduler._old_run_job(self, *args, **kwargs)
        Scheduler._run_job = _run_job
        # Subset of tasks considered safe to run in staging mode.
        stage_tasks = set(["cache_profitability", "leaderboard",
                           "server_status", "update_network",
                           "cache_user_donation", "update_online_workers"])
        for task_config in app.config['tasks']:
            if not task_config.get('enabled', False):
                continue
            if app.config['stage'] and task_config['name'] not in stage_tasks:
                app.logger.debug(
                    "Skipping scheduling {} because in stage mode!"
                    .format(task_config['name']))
                continue
            # Remaining keys are passed straight to add_cron_job.
            stripped_config = task_config.copy()
            del stripped_config['enabled']
            # NOTE(review): 'sch' is presumably the scheduler-tasks module
            # imported at file level -- confirm it is in scope here.
            task = getattr(sch, task_config['name'])
            sched.add_cron_job(task, **stripped_config)
        app.scheduler = sched
    if sentry:
        sentry.init_app(app, logging=True, level=logging.ERROR)
    # Route registration
    # =======================================================================
    from . import views, models, api, rpc_views
    app.register_blueprint(views.main)
    app.register_blueprint(rpc_views.rpc_views)
    app.register_blueprint(api.api, url_prefix='/api')
    return app
0
Example 33
Project: pyafipws Source File: wslpg.py
def test_ajuste_contrato(self, nro_contrato=27):
    "Test of a contract adjustment for a grain settlement (WSLPGv1.4)"
    # NOTE: integration test against the AFIP WSLPG web service; it creates a
    # real settlement, adjusts it, and cancels both documents in `finally`.
    wslpg = self.wslpg
    # request a settlement so we have an authorized COE to adjust:
    self.test_liquidacion_contrato(nro_contrato)
    coe_ajustado = wslpg.COE
    # request the latest order number for the new adjustment settlement:
    pto_emision = 55
    ok = wslpg.ConsultarUltNroOrden(pto_emision)
    self.assertTrue(ok)
    nro_orden = wslpg.NroOrden + 1
    # build the base adjustment record (fixed test CUITs / codes):
    wslpg.CrearAjusteBase(pto_emision=55, nro_orden=nro_orden,
                          nro_contrato=nro_contrato,
                          coe_ajustado=coe_ajustado,
                          nro_act_comprador=40,
                          cod_grano=31,
                          cuit_vendedor=23000000019,
                          cuit_comprador=20400000000,
                          cuit_corredor=20267565393,
                          precio_ref_tn=100,
                          cod_grado_ent="G1",
                          val_grado_ent=1.01,
                          precio_flete_tn=1000,
                          cod_puerto=14,
                          des_puerto_localidad="Desc Puerto",
                          cod_provincia=1,
                          cod_localidad=5,
                          )
    wslpg.CrearAjusteCredito(
        concepto_importe_iva_0='Ajuste IVA al 0%',
        importe_ajustar_Iva_0=100,
    )
    wslpg.CrearAjusteDebito(
        concepto_importe_iva_105='Ajuste IVA al 10.5%',
        importe_ajustar_Iva_105=100,
    )
    wslpg.AgregarDeduccion(codigo_concepto="OD",
                           detalle_aclaratorio="Otras Deduc",
                           dias_almacenaje="1",
                           base_calculo=100.0,
                           alicuota=10.5, )
    # authorize the adjustment:
    ok = wslpg.AjustarLiquidacionContrato()
    self.assertTrue(ok)
    # verify the general response:
    coe = wslpg.COE
    self.assertIsInstance(wslpg.COE, basestring)  # Python 2 `basestring`
    self.assertEqual(len(wslpg.COE), len("330100013133"))
    try:
        # expected totals come from the fixed amounts sent above
        self.assertEqual(wslpg.Estado, "AC")
        self.assertEqual(wslpg.Subtotal, Decimal("-100.00"))
        self.assertEqual(wslpg.TotalIva105, Decimal("0"))
        self.assertEqual(wslpg.TotalIva21, Decimal("0"))
        self.assertEqual(wslpg.TotalRetencionesGanancias, Decimal("0"))
        self.assertEqual(wslpg.TotalRetencionesIVA, Decimal("0"))
        self.assertEqual(wslpg.TotalNetoAPagar, Decimal("-110.50"))
        self.assertEqual(wslpg.TotalIvaRg2300_07, Decimal("0"))
        self.assertEqual(wslpg.TotalPagoSegunCondicion, Decimal("-110.50"))
        ##self.assertEqual(wslpg.NroContrato, nro_contrato) # not returned by AFIP
        # verify undocumented global fields (directly from the XML):
        wslpg.AnalizarXml()
        v = wslpg.ObtenerTagXml("totalesUnificados", "subTotalDebCred")
        self.assertEqual(v, "0")
        v = wslpg.ObtenerTagXml("totalesUnificados", "totalBaseDeducciones")
        self.assertEqual(v, "100.0")
        v = wslpg.ObtenerTagXml("totalesUnificados", "ivaDeducciones")
        self.assertEqual(v, "10.50")
        # verify the credit adjustment
        ok = wslpg.AnalizarAjusteCredito()
        self.assertTrue(ok)
        self.assertEqual(wslpg.GetParametro("precio_operacion"), "0.000")
        self.assertEqual(wslpg.GetParametro("total_peso_neto"), "0")
        self.assertEqual(wslpg.TotalDeduccion, Decimal("0.000"))
        self.assertEqual(wslpg.TotalPagoSegunCondicion, Decimal("0.000"))
        self.assertEqual(float(wslpg.GetParametro("importe_iva")), 0.00)
        self.assertEqual(float(wslpg.GetParametro("operacion_con_iva")), 0.00)
        # verify the debit adjustment
        ok = wslpg.AnalizarAjusteDebito()
        self.assertTrue(ok)
        self.assertEqual(float(wslpg.GetParametro("precio_operacion")), 0.00)
        self.assertEqual(float(wslpg.GetParametro("total_peso_neto")), 0)
        self.assertEqual(wslpg.TotalDeduccion, Decimal("110.50"))
        self.assertEqual(wslpg.TotalPagoSegunCondicion, Decimal("-110.50"))
        self.assertEqual(float(wslpg.GetParametro("importe_iva")), 0.00)
        self.assertEqual(float(wslpg.GetParametro("operacion_con_iva")), 0.00)
        self.assertEqual(float(wslpg.GetParametro("deducciones", 0, "importe_iva")), 10.50)
        self.assertEqual(float(wslpg.GetParametro("deducciones", 0, "importe_deduccion")), 110.50)
    finally:
        # cancel the adjustment to avoid subsequent AFIP validation:
        # 2105: cannot relate the settlement to the contract, because the contract has an adjustment applied.
        # 2106: cannot adjust the contract, because it has settlements related to an adjustment.
        # cancel the adjustment first to avoid the AFIP validation:
        # 2108: cannot cancel the settlement because it is related to a contract with an active adjustment.
        if coe:
            self.test_anular(coe)
        if coe_ajustado:
            self.test_anular(coe_ajustado)  # also cancel the adjusted COE
0
Example 34
def __init__(self, sheet_width, sheet_height, columns, rows, label_width, label_height, **kwargs):
    """Describe the geometry of a sheet of labels.

    Required parameters
    -------------------
    sheet_width, sheet_height: positive dimension
        The size of the sheet.
    columns, rows: positive integer
        The number of labels on the sheet.
    label_width, label_height: positive dimension
        The size of each label.

    Margins and gaps
    ----------------
    left_margin: positive dimension
        The gap between the left edge of the sheet and the first column.
    column_gap: positive dimension
        The internal gap between columns.
    right_margin: positive dimension
        The gap between the right edge of the sheet and the last column.
    top_margin: positive dimension
        The gap between the top edge of the sheet and the first row.
    row_gap: positive dimension
        The internal gap between rows.
    bottom_margin: positive dimension
        The gap between the bottom edge of the sheet and the last row.

    Padding
    -------
    left_padding, right_padding, top_padding, bottom_padding: positive dimensions, default 0
        The padding between the edges of the label and the area available
        to draw on.

    Corners
    -------
    corner_radius: positive dimension, default 0
        Gives the labels rounded corners with the given radius.
    padding_radius: positive dimension, default 0
        Give the drawing area rounded corners. If there is no padding, this
        must be set to zero.

    Background
    ----------
    background_image: reportlab.graphics.shape.Image
        An image to use as the background to the page. This will be
        automatically sized to fit the page; make sure it has the correct
        aspect ratio.
    background_filename: string
        Filename of an image to use as a background to the page. If both
        this and background_image are given, then background_image will
        take precedence.

    Raises
    ------
    InvalidDimension
        If any given dimension is invalid (i.e., the labels cannot fit on
        the sheet).
    """
    # Compulsory arguments. Dimensions go through Decimal so later layout
    # arithmetic is exact rather than subject to float rounding.
    self._sheet_width = Decimal(sheet_width)
    self._sheet_height = Decimal(sheet_height)
    self._columns = int(columns)
    self._rows = int(rows)
    self._label_width = Decimal(label_width)
    self._label_height = Decimal(label_height)
    # Optional arguments; missing ones will be computed later.
    self._left_margin = kwargs.pop('left_margin', None)
    self._column_gap = kwargs.pop('column_gap', None)
    self._right_margin = kwargs.pop('right_margin', None)
    self._top_margin = kwargs.pop('top_margin', None)
    self._row_gap = kwargs.pop('row_gap', None)
    self._bottom_margin = kwargs.pop('bottom_margin', None)
    # Optional arguments with default values.
    self._left_padding = kwargs.pop('left_padding', 0)
    self._right_padding = kwargs.pop('right_padding', 0)
    self._top_padding = kwargs.pop('top_padding', 0)
    self._bottom_padding = kwargs.pop('bottom_padding', 0)
    self._corner_radius = Decimal(kwargs.pop('corner_radius', 0))
    self._padding_radius = Decimal(kwargs.pop('padding_radius', 0))
    self._background_image = kwargs.pop('background_image', None)
    self._background_filename = kwargs.pop('background_filename', None)
    # Leftover arguments: anything still in kwargs is unknown.
    if kwargs:
        # list() is required: on Python 3, dict.keys() returns a view that
        # does not support indexing (args[0] would raise TypeError).
        args = list(kwargs.keys())
        if len(args) == 1:
            raise TypeError("Unknown keyword argument {}.".format(args[0]))
        else:
            raise TypeError("Unknown keyword arguments: {}.".format(', '.join(args)))
    # Track which attributes have been automatically set.
    self._autoset = set()
    # Check all the dimensions etc are valid.
    self._calculate()
0
Example 35
Project: djangae Source File: commands.py
def _build_query(self):
    """Translate the normalized Django query into App Engine datastore queries.

    Returns one of: a single datastore ``Query`` (no filters, or a single
    AND branch), a ``QueryByKeys`` / ``UniqueQuery`` optimization wrapper,
    or a ``datastore.MultiQuery`` combining one query per OR branch.
    Raises ``NotSupportedError`` for orderings the datastore rejects.
    """
    self._sanity_check()
    queries = []
    # Projection queries must not include the primary key column.
    projection = self._exclude_pk(self.query.columns) or None
    query_kwargs = {
        "kind": self.query.concrete_model._meta.db_table,
        "distinct": self.query.distinct or None,
        "keys_only": self.keys_only or None,
        "projection": projection,
        "namespace": self.namespace,
    }
    ordering = convert_django_ordering_to_gae(self.query.order_by)
    if self.query.distinct and not ordering:
        # If we specified we wanted a distinct query, but we didn't specify
        # an ordering, we must set the ordering to the distinct columns, otherwise
        # App Engine shouts at us. Nastily. And without remorse.
        # The order of the columns in `ordering` makes a difference, but `distinct` is a set
        # and therefore unordered, but in this situation (where the ordering has not been
        # explicitly defined) any order of the columns will do
        ordering = list(self.query.columns)
    # Deal with the no filters case
    if self.query.where is None:
        query = Query(
            **query_kwargs
        )
        try:
            query.Order(*ordering)
        except datastore_errors.BadArgumentError as e:
            # Unsupported ordering surfaces here; re-raise in Django terms.
            raise NotSupportedError(e)
        return query
    assert self.query.where
    # Go through the normalized query tree: one datastore query per OR branch.
    for and_branch in self.query.where.children:
        query = Query(
            **query_kwargs
        )
        # This deals with the oddity that the root of the tree may well be a leaf
        filters = [and_branch] if and_branch.is_leaf else and_branch.children
        for filter_node in filters:
            lookup = "{} {}".format(filter_node.column, filter_node.operator)
            value = filter_node.value
            # This is a special case. Annoyingly Django's decimal field doesn't
            # ever call ops.get_prep_save or lookup or whatever when you are filtering
            # on a query. It *does* do it on a save, so we basically need to do a
            # conversion here, when really it should be handled elsewhere
            if isinstance(value, decimal.Decimal):
                field = get_field_from_column(self.query.model, filter_node.column)
                value = self.connection.ops.adapt_decimalfield_value(value, field.max_digits, field.decimal_places)
            elif isinstance(value, basestring):  # Python 2 string types
                value = coerce_unicode(value)
            elif isinstance(value, datastore.Key):
                # Make sure we apply the current namespace to any lookups
                # by key. Fixme: if we ever add key properties this will break if
                # someone is trying to filter on a key which has a different namespace
                # to the active one.
                value = datastore.Key.from_path(
                    value.kind(),
                    value.id_or_name(),
                    namespace=self.namespace
                )
            # If there is already a value for this lookup, we need to make the
            # value a list and append the new entry
            if lookup in query and not isinstance(query[lookup], (list, tuple)) and query[lookup] != value:
                query[lookup] = [query[lookup] ] + [value]
            else:
                # If the value is a list, we can't just assign it to the query
                # which will treat each element as its own value. So in this
                # case we nest it. This has the side effect of throwing a BadValueError
                # which we could throw ourselves, but the datastore might start supporting
                # list values in lookups.. you never know!
                if isinstance(value, (list, tuple)):
                    query[lookup] = [value]
                else:
                    # Common case: just add the raw where constraint
                    query[lookup] = value
        if ordering:
            try:
                query.Order(*ordering)
            except datastore_errors.BadArgumentError as e:
                # This is the easiest way to detect unsupported orderings
                # ideally we'd detect this at the query normalization stage
                # but it's a lot of hassle, this is much easier and seems to work OK
                raise NotSupportedError(e)
        queries.append(query)
    if can_perform_datastore_get(self.query):
        # Yay for optimizations!
        return QueryByKeys(self.query.model, queries, ordering, self.namespace)
    if len(queries) == 1:
        identifier = query_is_unique(self.query.model, queries[0])
        if identifier:
            # Yay for optimizations!
            return UniqueQuery(identifier, queries[0], self.query.model, self.namespace)
        return queries[0]
    else:
        return datastore.MultiQuery(queries, ordering)
0
Example 36
def __init__(self, description, default=None, location=None):
    """
    Setup a decimal range as specified by ``description``.

    :param str description: a range description of the form \
        ``lower...upper`` or ``limit``, possibly consisting of multiple \
        items. In case it is empty (``''``), the range specified by \
        ``default`` is used; the description also specifies the \
        :py:attr:`~cutplace.ranges.DecimalRange.scale` and \
        :py:attr:`~cutplace.ranges.DecimalRange.precision` valid numbers \
        can use.
    :param str default: an alternative to use in case ``description``
        is ``None`` or empty; in case both ``description`` and \
        ``default`` are ``None`` or empty, all values within the \
        :py:const:`DEFAULT_SCALE` and :py:const:`DEFAULT_PRECISION` are \
        valid.
    :raises errors.InterfaceError: if ``description`` is malformed or \
        contains overlapping range items.
    """
    assert default is None or (default.strip() != ''), "default=%r" % default
    self._precision = DEFAULT_PRECISION
    self._scale = DEFAULT_SCALE
    # Find out if a `description` has been specified and if not, use optional `default` instead.
    has_description = (description is not None) and (description.strip() != '')
    if not has_description and default is not None:
        description = default
        has_description = True
    if not has_description:
        # Use empty ranges.
        self._description = None
        self._items = None
        self._lower_limit = None
        self._upper_limit = None
    else:
        self._description = description.replace('...', ELLIPSIS)
        self._items = []
        tokens = _tools.tokenize_without_space(self._description)
        end_reached = False
        # Track the widest number seen so precision/scale can be derived.
        max_digits_after_dot = 0
        max_digits_before_dot = 0
        while not end_reached:
            # One pass of this loop consumes one comma-separated range item.
            lower = None
            upper = None
            ellipsis_found = False
            after_hyphen = False
            next_token = next(tokens)
            while not _tools.is_eof_token(next_token) and not _tools.is_comma_token(next_token):
                next_type = next_token[0]
                next_value = next_token[1]
                if next_type == token.NUMBER:
                    if next_type == token.NUMBER:
                        # NOTE(review): this inner check duplicates the outer
                        # condition and is always true; behavior is unchanged.
                        try:
                            decimal_value = decimal.Decimal(next_value)
                            # as_tuple() exposes digits/exponent to measure the
                            # digits before and after the decimal point.
                            _, digits, exponent = decimal_value.as_tuple()
                            digits_after_dot = max(0, -exponent)
                            if digits_after_dot > max_digits_after_dot:
                                max_digits_after_dot = digits_after_dot
                            digits_before_dot = len(digits) + exponent
                            if digits_before_dot > max_digits_before_dot:
                                max_digits_before_dot = digits_before_dot
                        except decimal.DecimalException:
                            raise errors.InterfaceError(
                                "number must be an decimal or integer but is: %s"
                                % _compat.text_repr(next_value), location)
                    if after_hyphen:
                        # A preceding "-" token negates this number.
                        decimal_value = decimal_value.copy_negate()
                        after_hyphen = False
                    if ellipsis_found:
                        if upper is None:
                            upper = decimal_value
                        else:
                            raise errors.InterfaceError(
                                "range must have at most lower and upper limit but found another number: %s"
                                % _compat.text_repr(next_value), location)
                    elif lower is None:
                        lower = decimal_value
                    else:
                        raise errors.InterfaceError(
                            "number must be followed by ellipsis (...) but found: %s"
                            % _compat.text_repr(next_value))
                elif after_hyphen:
                    raise errors.InterfaceError(
                        "hyphen (-) must be followed by number but found: %s" % _compat.text_repr(next_value))
                elif (next_type == token.OP) and (next_value == "-"):
                    after_hyphen = True
                elif next_value in (ELLIPSIS, ':'):
                    ellipsis_found = True
                else:
                    message = "range must be specified using decimal or integer numbers" \
                        " and ellipsis (...) but found: %s [token type: %d]" \
                        % (_compat.text_repr(next_value), next_type)
                    raise errors.InterfaceError(message)
                next_token = next(tokens)
            if after_hyphen:
                raise errors.InterfaceError("hyphen (-) at end must be followed by number")
            # Decide upon the result.
            # NOTE(review): if an item is empty (both limits None, no
            # ellipsis), ``range_item`` looks like it may be unbound on the
            # first iteration — confirm against the upstream source.
            if lower is None:
                if upper is None:
                    if ellipsis_found:
                        # Handle "...".
                        # TODO: Handle "..." same as ""?
                        raise errors.InterfaceError("ellipsis (...) must be preceded and/or succeeded by number")
                else:
                    assert ellipsis_found
                    # Handle "...y".
                    range_item = (None, upper)
            elif ellipsis_found:
                # Handle "x..." and "x...y".
                if (upper is not None) and (lower > upper):
                    raise errors.InterfaceError(
                        "lower limit %s must be less or equal than upper limit %s"
                        % (_decimal_as_text(lower, self.precision), _decimal_as_text(upper, self.precision)))
                range_item = (lower, upper)
            else:
                # Handle "x".
                range_item = (lower, lower)
            if range_item is not None:
                self._precision = max_digits_after_dot
                self._scale = max_digits_before_dot + max_digits_after_dot
                # Reject items that overlap an already accepted item.
                for item in self._items:
                    if self._items_overlap(item, range_item):
                        item_text = _compat.text_repr(self._repr_item(item))
                        result_text = _compat.text_repr(self._repr_item(range_item))
                        raise errors.InterfaceError(
                            "overlapping parts in decimal range must be cleaned up: %s and %s"
                            % (item_text, result_text), location)
                self._items.append(range_item)
            if _tools.is_eof_token(next_token):
                end_reached = True
        assert self.precision >= 0
        assert self.scale >= self.precision
        # Derive overall lower/upper limits across all items; a ``None``
        # on either side means that side is unbounded.
        self._lower_limit = None
        self._upper_limit = None
        is_first_item = True
        for lower_item, upper_item in self._items:
            if is_first_item:
                self._lower_limit = lower_item
                self._upper_limit = upper_item
                is_first_item = False
            if lower_item is None:
                self._lower_limit = None
            elif (self._lower_limit is not None) and (lower_item < self._lower_limit):
                self._lower_limit = lower_item
            if upper_item is None:
                self._upper_limit = None
            elif (self._upper_limit is not None) and (upper_item > self._upper_limit):
                self._upper_limit = upper_item
Example 37
Project: snowy Source File: emitters.py
def construct(self):
    """
    Recursively serialize a lot of types, and
    in cases where it doesn't recognize the type,
    it will fall back to Django's `smart_unicode`.
    Returns `dict`.

    NOTE: this is Python 2 code (``except X, e``, ``iteritems``,
    ``basestring``); it will not parse on Python 3.
    """
    def _any(thing, fields=()):
        """
        Dispatch, all types are routed through here.
        """
        ret = None
        if isinstance(thing, QuerySet):
            ret = _qs(thing, fields=fields)
        elif isinstance(thing, (tuple, list)):
            ret = _list(thing)
        elif isinstance(thing, dict):
            ret = _dict(thing)
        elif isinstance(thing, decimal.Decimal):
            # Decimals are serialized as strings to avoid float precision loss.
            ret = str(thing)
        elif isinstance(thing, Model):
            ret = _model(thing, fields=fields)
        elif isinstance(thing, HttpResponse):
            # An HttpResponse in the data aborts serialization via exception.
            raise HttpStatusCode(thing)
        elif inspect.isfunction(thing):
            # Only call zero-argument functions; anything else stays None.
            if not inspect.getargspec(thing)[0]:
                ret = _any(thing())
        elif hasattr(thing, '__emittable__'):
            # Objects may opt in to serialization via __emittable__().
            f = thing.__emittable__
            if inspect.ismethod(f) and len(inspect.getargspec(f)[0]) == 1:
                ret = _any(f())
        elif repr(thing).startswith("<django.db.models.fields.related.RelatedManager"):
            # repr() sniffing: RelatedManager isn't importable directly here.
            ret = _any(thing.all())
        else:
            ret = smart_unicode(thing, strings_only=True)
        return ret
    def _fk(data, field):
        """
        Foreign keys.
        """
        return _any(getattr(data, field.name))
    def _related(data, fields=()):
        """
        Foreign keys.
        """
        return [ _model(m, fields) for m in data.iterator() ]
    def _m2m(data, field, fields=()):
        """
        Many to many (re-route to `_model`.)
        """
        return [ _model(m, fields) for m in getattr(data, field.name).iterator() ]
    def _model(data, fields=()):
        """
        Models. Will respect the `fields` and/or
        `exclude` on the handler (see `typemapper`.)
        """
        ret = { }
        handler = self.in_typemapper(type(data), self.anonymous)
        get_absolute_uri = False
        if handler or fields:
            v = lambda f: getattr(data, f.attname)
            if not fields:
                """
                Fields was not specified, try to find teh correct
                version in the typemapper we were sent.
                """
                mapped = self.in_typemapper(type(data), self.anonymous)
                get_fields = set(mapped.fields)
                exclude_fields = set(mapped.exclude).difference(get_fields)
                if 'absolute_uri' in get_fields:
                    get_absolute_uri = True
                if not get_fields:
                    # No explicit field list: default to all concrete fields,
                    # normalizing FK attnames ("foo_id" -> "foo").
                    get_fields = set([ f.attname.replace("_id", "", 1)
                        for f in data._meta.fields ])
                # sets can be negated.
                for exclude in exclude_fields:
                    if isinstance(exclude, basestring):
                        get_fields.discard(exclude)
                    elif isinstance(exclude, re._pattern_type):
                        # Regex excludes drop every matching field name.
                        for field in get_fields.copy():
                            if exclude.match(field):
                                get_fields.discard(field)
            else:
                get_fields = set(fields)
            met_fields = self.method_fields(handler, get_fields)
            # Concrete local fields: plain values and foreign keys.
            for f in data._meta.local_fields:
                if f.serialize and not any([ p in met_fields for p in [ f.attname, f.name ]]):
                    if not f.rel:
                        if f.attname in get_fields:
                            ret[f.attname] = _any(v(f))
                            get_fields.remove(f.attname)
                    else:
                        # FK: attname is "foo_id"; strip the suffix to match.
                        if f.attname[:-3] in get_fields:
                            ret[f.name] = _fk(data, f)
                            get_fields.remove(f.name)
            for mf in data._meta.many_to_many:
                if mf.serialize and mf.attname not in met_fields:
                    if mf.attname in get_fields:
                        ret[mf.name] = _m2m(data, mf)
                        get_fields.remove(mf.name)
            # try to get the remainder of fields
            for maybe_field in get_fields:
                if isinstance(maybe_field, (list, tuple)):
                    # ("relation_name", sub_fields) requests nested serialization.
                    model, fields = maybe_field
                    inst = getattr(data, model, None)
                    if inst:
                        if hasattr(inst, 'all'):
                            ret[model] = _related(inst, fields)
                        elif callable(inst):
                            if len(inspect.getargspec(inst)[0]) == 1:
                                ret[model] = _any(inst(), fields)
                        else:
                            ret[model] = _model(inst, fields)
                elif maybe_field in met_fields:
                    # Overriding normal field which has a "resource method"
                    # so you can alter the contents of certain fields without
                    # using different names.
                    ret[maybe_field] = _any(met_fields[maybe_field](data))
                else:
                    maybe = getattr(data, maybe_field, None)
                    if maybe:
                        if callable(maybe):
                            if len(inspect.getargspec(maybe)[0]) == 1:
                                ret[maybe_field] = _any(maybe())
                        else:
                            ret[maybe_field] = _any(maybe)
                    else:
                        handler_f = getattr(handler or self.handler, maybe_field, None)
                        if handler_f:
                            ret[maybe_field] = _any(handler_f(data))
        else:
            # No handler and no field list: dump every concrete field plus
            # any extra attributes not present on the class itself.
            for f in data._meta.fields:
                ret[f.attname] = _any(getattr(data, f.attname))
            fields = dir(data.__class__) + ret.keys()
            add_ons = [k for k in dir(data) if k not in fields]
            for k in add_ons:
                ret[k] = _any(getattr(data, k))
        # resouce uri
        if self.in_typemapper(type(data), self.anonymous):
            handler = self.in_typemapper(type(data), self.anonymous)
            if hasattr(handler, 'resource_uri'):
                url_id, fields = handler.resource_uri(data)
                try:
                    ret['resource_uri'] = reverser( lambda: (url_id, fields) )()
                except NoReverseMatch, e:
                    pass
        if hasattr(data, 'get_api_url') and 'resource_uri' not in ret:
            try: ret['resource_uri'] = data.get_api_url()
            except: pass
        # absolute uri
        if hasattr(data, 'get_absolute_url') and get_absolute_uri:
            try: ret['absolute_uri'] = data.get_absolute_url()
            except: pass
        return ret
    def _qs(data, fields=()):
        """
        Querysets.
        """
        return [ _any(v, fields) for v in data ]
    def _list(data):
        """
        Lists.
        """
        return [ _any(v) for v in data ]
    def _dict(data):
        """
        Dictionaries.
        """
        return dict([ (k, _any(v)) for k, v in data.iteritems() ])
    # Kickstart the seralizin'.
    return _any(self.data, self.fields)
0
Example 38
Project: RaceCapture_App Source File: __init__.py
def _get_ticks(self, major, minor, log, s_min, s_max):
    """Compute major and minor tick positions for an axis.

    ``major`` is the spacing between major ticks, ``minor`` the number of
    subdivisions per major interval, ``log`` selects logarithmic spacing,
    and ``s_min``/``s_max`` bound the axis. Returns
    ``(points_major, points_minor)``; for a log axis positions are in
    log10 space. Returns two empty lists when no ticks apply.
    """
    if major and s_max > s_min:
        if log:
            # Work in log10 space for the whole computation.
            s_min = log10(s_min)
            s_max = log10(s_max)
            # count the decades in min - max. This is in actual decades,
            # not logs.
            n_decades = floor(s_max - s_min)
            # for the fractional part of the last decade, we need to
            # convert the log value, x, to 10**x but need to handle
            # differently if the last incomplete decade has a decade
            # boundary in it
            if floor(s_min + n_decades) != floor(s_max):
                n_decades += 1 - (10 ** (s_min + n_decades + 1) - 10 **
                                  s_max) / 10 ** floor(s_max + 1)
            else:
                n_decades += ((10 ** s_max - 10 ** (s_min + n_decades)) /
                              10 ** floor(s_max + 1))
            # this might be larger than what is needed, but we delete
            # excess later
            n_ticks_major = n_decades / float(major)
            n_ticks = int(floor(n_ticks_major * (minor if minor >=
                                                 1. else 1.0))) + 2
            # in decade multiples, e.g. 0.1 of the decade, the distance
            # between ticks
            decade_dist = major / float(minor if minor else 1.0)
            points_minor = [0] * n_ticks
            points_major = [0] * n_ticks
            k = 0  # position in points major
            k2 = 0  # position in points minor
            # because each decade is missing 0.1 of the decade, if a tick
            # falls in < min_pos skip it
            min_pos = 0.1 - 0.00001 * decade_dist
            s_min_low = floor(s_min)
            # first real tick location. value is in fractions of decades
            # from the start we have to use decimals here, otherwise
            # floating point inaccuracies results in bad values
            start_dec = ceil((10 ** Decimal(s_min - s_min_low - 1)) /
                             Decimal(decade_dist)) * decade_dist
            count_min = (0 if not minor else
                         floor(start_dec / decade_dist) % minor)
            start_dec += s_min_low
            count = 0  # number of ticks we currently have passed start
            while True:
                # this is the current position in decade that we are.
                # e.g. -0.9 means that we're at 0.1 of the 10**ceil(-0.9)
                # decade
                pos_dec = start_dec + decade_dist * count
                pos_dec_low = floor(pos_dec)
                diff = pos_dec - pos_dec_low
                zero = abs(diff) < 0.001 * decade_dist
                if zero:
                    # the same value as pos_dec but in log scale
                    pos_log = pos_dec_low
                else:
                    pos_log = log10((pos_dec - pos_dec_low
                                     ) * 10 ** ceil(pos_dec))
                if pos_log > s_max:
                    break
                count += 1
                if zero or diff >= min_pos:
                    # every `minor`-th accepted tick is a major tick
                    if minor and not count_min % minor:
                        points_major[k] = pos_log
                        k += 1
                    else:
                        points_minor[k2] = pos_log
                        k2 += 1
                    count_min += 1
            #n_ticks = len(points)
        else:
            # Linear axis: evenly spaced ticks from s_min.
            # distance between each tick
            tick_dist = major / float(minor if minor else 1.0)
            n_ticks = int(floor((s_max - s_min) / tick_dist) + 1)
            points_major = [0] * int(floor((s_max - s_min) / float(major))
                                     + 1)
            points_minor = [0] * (n_ticks - len(points_major) + 1)
            k = 0  # position in points major
            k2 = 0  # position in points minor
            for m in range(0, n_ticks):
                if minor and m % minor:
                    points_minor[k2] = m * tick_dist + s_min
                    k2 += 1
                else:
                    points_major[k] = m * tick_dist + s_min
                    k += 1
        # trim the over-allocated tails of both lists
        del points_major[k:]
        del points_minor[k2:]
    else:
        points_major = []
        points_minor = []
    return points_major, points_minor
Example 39
Project: sharpy Source File: parser_tests.py
def test_plans_parser_with_items(self):
    """Parse ``plans_with_items.xml`` and check the fully-typed plan dicts.

    Verifies type coercion by the parser: tz-aware datetimes, Decimal
    amounts, booleans, and nested per-plan item lists.
    """
    plans_xml = self.load_file('plans_with_items.xml')
    parser = PlansParser()
    expected = [ { 'billing_frequency': 'monthly',
                   'billing_frequency_per': 'month',
                   'billing_frequency_quantity': 1,
                   'billing_frequency_unit': 'months',
                   'code': 'FREE_MONTHLY',
                   'created_datetime': datetime(2011, 1, 7, 20, 46, 43, tzinfo=tzutc()),
                   'description': 'A free monthly plan',
                   'id': '6b0d13f4-6bef-102e-b098-40402145ee8b',
                   'initial_bill_count': 1,
                   'initial_bill_count_unit': 'months',
                   'is_active': True,
                   'is_free': True,
                   'items': [ { 'code': 'MONTHLY_ITEM',
                                'created_datetime': datetime(2011, 1, 10, 22, 40, 34, tzinfo=tzutc()),
                                'id': 'd19b4970-6e5a-102e-b098-40402145ee8b',
                                'is_periodic': False,
                                'name': 'Monthly Item',
                                'overage_amount': Decimal('0.00'),
                                'quantity_included': Decimal('0')},
                              { 'code': 'ONCE_ITEM',
                                'created_datetime': datetime(2011, 1, 10, 22, 40, 34, tzinfo=tzutc()),
                                'id': 'd19ef2f0-6e5a-102e-b098-40402145ee8b',
                                'is_periodic': False,
                                'name': 'Once Item',
                                'overage_amount': Decimal('0.00'),
                                'quantity_included': Decimal('0')}],
                   'name': 'Free Monthly',
                   'recurring_charge_amount': Decimal('0.00'),
                   'recurring_charge_code': 'FREE_MONTHLY_RECURRING',
                   'setup_charge_amount': Decimal('0.00'),
                   'setup_charge_code': '',
                   'trial_days': 0},
                 { 'billing_frequency': 'monthly',
                   'billing_frequency_per': 'month',
                   'billing_frequency_quantity': 1,
                   'billing_frequency_unit': 'months',
                   'code': 'TRACKED_MONTHLY',
                   'created_datetime': datetime(2011, 1, 10, 22, 40, 34, tzinfo=tzutc()),
                   'description': '',
                   'id': 'd19974a6-6e5a-102e-b098-40402145ee8b',
                   'initial_bill_count': 1,
                   'initial_bill_count_unit': 'months',
                   'is_active': True,
                   'is_free': False,
                   'items': [ { 'code': 'MONTHLY_ITEM',
                                'created_datetime': datetime(2011, 1, 10, 22, 40, 34, tzinfo=tzutc()),
                                'id': 'd19b4970-6e5a-102e-b098-40402145ee8b',
                                'is_periodic': True,
                                'name': 'Monthly Item',
                                'overage_amount': Decimal('10.00'),
                                'quantity_included': Decimal('2')},
                              { 'code': 'ONCE_ITEM',
                                'created_datetime': datetime(2011, 1, 10, 22, 40, 34, tzinfo=tzutc()),
                                'id': 'd19ef2f0-6e5a-102e-b098-40402145ee8b',
                                'is_periodic': False,
                                'name': 'Once Item',
                                'overage_amount': Decimal('10.00'),
                                'quantity_included': Decimal('0')}],
                   'name': 'Tracked Monthly',
                   'recurring_charge_amount': Decimal('10.00'),
                   'recurring_charge_code': 'TRACKED_MONTHLY_RECURRING',
                   'setup_charge_amount': Decimal('0.00'),
                   'setup_charge_code': '',
                   'trial_days': 0},
                 { 'billing_frequency': 'monthly',
                   'billing_frequency_per': 'month',
                   'billing_frequency_quantity': 1,
                   'billing_frequency_unit': 'months',
                   'code': 'PAID_MONTHLY',
                   'created_datetime': datetime(2011, 1, 7, 21, 5, 42, tzinfo=tzutc()),
                   'description': '',
                   'id': '11af9cfc-6bf2-102e-b098-40402145ee8b',
                   'initial_bill_count': 1,
                   'initial_bill_count_unit': 'months',
                   'is_active': True,
                   'is_free': False,
                   'items': [ { 'code': 'MONTHLY_ITEM',
                                'created_datetime': datetime(2011, 1, 10, 22, 40, 34, tzinfo=tzutc()),
                                'id': 'd19b4970-6e5a-102e-b098-40402145ee8b',
                                'is_periodic': False,
                                'name': 'Monthly Item',
                                'overage_amount': Decimal('0.00'),
                                'quantity_included': Decimal('0')},
                              { 'code': 'ONCE_ITEM',
                                'created_datetime': datetime(2011, 1, 10, 22, 40, 34, tzinfo=tzutc()),
                                'id': 'd19ef2f0-6e5a-102e-b098-40402145ee8b',
                                'is_periodic': False,
                                'name': 'Once Item',
                                'overage_amount': Decimal('0.00'),
                                'quantity_included': Decimal('0')}],
                   'name': 'Paid Monthly',
                   'recurring_charge_amount': Decimal('20.00'),
                   'recurring_charge_code': 'PAID_MONTHLY_RECURRING',
                   'setup_charge_amount': Decimal('0.00'),
                   'setup_charge_code': '',
                   'trial_days': 0}]
    result = parser.parse_xml(plans_xml)
    # Leftover debug pprint of `result` removed: it cluttered test output on
    # every run. assertEqual replaces the deprecated assertEquals alias; its
    # failure diff already shows the mismatching structures.
    self.assertEqual(expected, result)
Example 40
Project: sharpy Source File: parser_tests.py
def test_customers_parser_with_items(self):
    """Parse ``customers-with-items.xml`` and check the full customer tree.

    Covers the deep nesting: customer -> subscriptions -> invoices/charges,
    tracked items, and attached plans, with Decimal amounts and tz-aware
    datetimes.
    """
    customers_xml = self.load_file('customers-with-items.xml')
    parser = CustomersParser()
    expected = [ { 'campaign_content': '',
                   'campaign_medium': '',
                   'campaign_name': '',
                   'campaign_source': '',
                   'campaign_term': '',
                   'code': 'test',
                   'company': '',
                   'created_datetime': datetime(2011, 1, 10, 23, 57, 58, tzinfo=tzutc()),
                   'email': '[email protected]',
                   'first_contact_datetime': None,
                   'first_name': 'Test',
                   'gateway_token': None,
                   'id': 'a1f143e0-6e65-102e-b098-40402145ee8b',
                   'is_vat_exempt': '0',
                   'last_name': 'User',
                   'meta_data': [],
                   'modified_datetime': datetime(2011, 1, 10, 23, 57, 58, tzinfo=tzutc()),
                   'notes': '',
                   'referer': '',
                   'referer_host': '',
                   'subscriptions': [ { 'cancel_reason': None,
                                        'cancel_type': None,
                                        'canceled_datetime': None,
                                        'cc_address': '123 Something St',
                                        'cc_city': 'Someplace',
                                        'cc_company': 'Some Co LLC',
                                        'cc_country': 'United States',
                                        'cc_email': None,
                                        'cc_expiration_date': '2011-07-31T00:00:00+00:00',
                                        'cc_first_name': 'Test',
                                        'cc_last_four': '1111',
                                        'cc_last_name': 'User',
                                        'cc_state': 'NY',
                                        'cc_type': 'visa',
                                        'cc_zip': '12345',
                                        'created_datetime': datetime(2011, 1, 10, 23, 57, 58, tzinfo=tzutc()),
                                        'gateway_token': 'SIMULATED',
                                        'id': 'a1f27c60-6e65-102e-b098-40402145ee8b',
                                        'invoices': [ { 'billing_datetime': datetime(2011, 2, 10, 23, 57, 58, tzinfo=tzutc()),
                                                        'charges': [ { 'code': 'TRACKED_MONTHLY_RECURRING',
                                                                       'created_datetime': datetime(2011, 2, 10, 23, 57, 58, tzinfo=tzutc()),
                                                                       'description': '',
                                                                       'each_amount': Decimal('10.00'),
                                                                       'id': '',
                                                                       'quantity': Decimal('1'),
                                                                       'type': 'recurring'},
                                                                     { 'code': 'MONTHLY_ITEM',
                                                                       'created_datetime': datetime(2011, 1, 10, 23, 57, 58, tzinfo=tzutc()),
                                                                       'description': '',
                                                                       'each_amount': Decimal('10.00'),
                                                                       'id': '',
                                                                       'quantity': Decimal('1'),
                                                                       'type': 'item'},
                                                                     { 'code': 'ONCE_ITEM',
                                                                       'created_datetime': datetime(2011, 1, 10, 23, 57, 58, tzinfo=tzutc()),
                                                                       'description': '',
                                                                       'each_amount': Decimal('10.00'),
                                                                       'id': '',
                                                                       'quantity': Decimal('1'),
                                                                       'type': 'item'}],
                                                        'created_datetime': datetime(2011, 1, 10, 23, 57, 58, tzinfo=tzutc()),
                                                        'id': 'a1f7faaa-6e65-102e-b098-40402145ee8b',
                                                        'number': '1',
                                                        'paid_transaction_id': '',
                                                        'type': 'subscription',
                                                        'vat_rate': ''}],
                                        'items': [ { 'code': 'MONTHLY_ITEM',
                                                     'created_datetime': datetime(2011, 1, 10, 23, 57, 58, tzinfo=tzutc()),
                                                     'id': 'd19b4970-6e5a-102e-b098-40402145ee8b',
                                                     'modified_datetime': datetime(2011, 1, 10, 23, 57, 58, tzinfo=tzutc()),
                                                     'name': 'Monthly Item',
                                                     'quantity': Decimal('3')},
                                                   { 'code': 'ONCE_ITEM',
                                                     'created_datetime': datetime(2011, 1, 10, 23, 57, 58, tzinfo=tzutc()),
                                                     'id': 'd19ef2f0-6e5a-102e-b098-40402145ee8b',
                                                     'modified_datetime': datetime(2011, 1, 10, 23, 57, 58, tzinfo=tzutc()),
                                                     'name': 'Once Item',
                                                     'quantity': Decimal('1')}],
                                        'plans': [ { 'billing_frequency': 'monthly',
                                                     'billing_frequency_per': 'month',
                                                     'billing_frequency_quantity': 1,
                                                     'billing_frequency_unit': 'months',
                                                     'code': 'TRACKED_MONTHLY',
                                                     'created_datetime': datetime(2011, 1, 10, 22, 40, 34, tzinfo=tzutc()),
                                                     'description': '',
                                                     'id': 'd19974a6-6e5a-102e-b098-40402145ee8b',
                                                     'initial_bill_count': 1,
                                                     'initial_bill_count_unit': 'months',
                                                     'is_active': True,
                                                     'is_free': False,
                                                     'items': [ { 'code': 'MONTHLY_ITEM',
                                                                  'created_datetime': datetime(2011, 1, 10, 22, 40, 34, tzinfo=tzutc()),
                                                                  'id': 'd19b4970-6e5a-102e-b098-40402145ee8b',
                                                                  'is_periodic': True,
                                                                  'name': 'Monthly Item',
                                                                  'overage_amount': Decimal('10.00'),
                                                                  'quantity_included': Decimal('2')},
                                                                { 'code': 'ONCE_ITEM',
                                                                  'created_datetime': datetime(2011, 1, 10, 22, 40, 34, tzinfo=tzutc()),
                                                                  'id': 'd19ef2f0-6e5a-102e-b098-40402145ee8b',
                                                                  'is_periodic': False,
                                                                  'name': 'Once Item',
                                                                  'overage_amount': Decimal('10.00'),
                                                                  'quantity_included': Decimal('0')}],
                                                     'name': 'Tracked Monthly',
                                                     'recurring_charge_amount': Decimal('10.00'),
                                                     'recurring_charge_code': 'TRACKED_MONTHLY_RECURRING',
                                                     'setup_charge_amount': Decimal('0.00'),
                                                     'setup_charge_code': '',
                                                     'trial_days': 0}],
                                        'redirect_url': None}],
                   'vat_number': ''}]
    result = parser.parse_xml(customers_xml)
    # Leftover debug pprint of `result` removed: it cluttered test output on
    # every run. assertEqual replaces the deprecated assertEquals alias; its
    # failure diff already shows the mismatching structures.
    self.assertEqual(expected, result)
Example 41
Project: silver Source File: views.py
def patch(self, request, *args, **kwargs):
    """Create or update the consumed-units log for one metered feature
    of a subscription.

    Expects ``date`` (ISO 8601), ``count`` and ``update_type``
    ('absolute' or 'relative') in the request body.  Creates or updates
    the MeteredFeatureUnitsLog bucket covering ``date`` and returns the
    resulting consumed-units count.
    """
    mf_product_code = self.kwargs.get('mf_product_code', None)
    subscription_pk = self.kwargs.get('subscription_pk', None)

    try:
        subscription = Subscription.objects.get(pk=subscription_pk)
    except Subscription.DoesNotExist:
        return Response({"detail": "Subscription Not found."},
                        status=status.HTTP_404_NOT_FOUND)

    # TODO: change this to try-except
    metered_feature = get_object_or_None(
        subscription.plan.metered_features,
        product_code__value=mf_product_code
    )
    if not metered_feature:
        return Response({"detail": "Metered Feature Not found."},
                        status=status.HTTP_404_NOT_FOUND)

    if subscription.state != 'active':
        return Response({"detail": "Subscription is not active."},
                        status=status.HTTP_403_FORBIDDEN)

    # Validate presence and non-blankness of the required payload fields.
    required_fields = ['date', 'count', 'update_type']
    provided_fields = {}
    errors = {}
    for field in required_fields:
        try:
            provided_fields[field] = request.data[field]
        except KeyError:
            errors[field] = ["This field is required."]

    for key in provided_fields:
        if not provided_fields[key]:
            errors[key] = ["This field may not be blank."]

    if errors:
        return Response(errors, status=status.HTTP_400_BAD_REQUEST)

    date = request.data['date']
    update_type = request.data['update_type']

    # BUG FIX: Decimal() raises InvalidOperation (an ArithmeticError)
    # for malformed numbers and TypeError for unsupported types; report
    # either as a client error instead of crashing with a 500.
    try:
        consumed_units = Decimal(request.data['count'])
    except (ArithmeticError, TypeError):
        return Response({'detail': 'Invalid count value.'},
                        status=status.HTTP_400_BAD_REQUEST)

    # BUG FIX: strptime raises ValueError (not only TypeError) when the
    # string does not match the format; the original caught only
    # TypeError, so a malformed date string produced an unhandled 500.
    try:
        date = datetime.datetime.strptime(date,
                                          '%Y-%m-%d').date()
    except (TypeError, ValueError):
        return Response({'detail': 'Invalid date format. Please '
                                   'use the ISO 8601 date format.'},
                        status=status.HTTP_400_BAD_REQUEST)

    if date < subscription.start_date:
        return Response({"detail": "Date is out of bounds."},
                        status=status.HTTP_400_BAD_REQUEST)

    # Resolve the billing bucket (start/end dates) containing `date`.
    bsd = subscription.bucket_start_date(date)
    bed = subscription.bucket_end_date(date)
    if not bsd or not bed:
        return Response(
            {'detail': 'An error has been encountered.'},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    # The bucket must still be open for updates.
    interval = next(
        (i for i in subscription.updateable_buckets()
         if i['start_date'] == bsd and i['end_date'] == bed),
        None)
    if interval is None:
        return Response({"detail": "Date is out of bounds."},
                        status=status.HTTP_400_BAD_REQUEST)

    if metered_feature not in \
            subscription.plan.metered_features.all():
        err = "The metered feature does not belong to the " \
              "subscription's plan."
        return Response(
            {"detail": err},
            status=status.HTTP_400_BAD_REQUEST
        )

    log = MeteredFeatureUnitsLog.objects.filter(
        start_date=bsd,
        end_date=bed,
        metered_feature=metered_feature.pk,
        subscription=subscription_pk
    ).first()

    if log is not None:
        if update_type == 'absolute':
            log.consumed_units = consumed_units
        elif update_type == 'relative':
            log.consumed_units += consumed_units
        log.save()
    else:
        log = MeteredFeatureUnitsLog.objects.create(
            metered_feature=metered_feature,
            subscription=subscription,
            start_date=bsd,
            end_date=bed,
            consumed_units=consumed_units
        )
    return Response({"count": log.consumed_units},
                    status=status.HTTP_200_OK)
0
Example 42
Project: PyClassLessons Source File: writer.py
@classmethod
def serialize(cls, value):
    """
    Serializes the value to a string that's parsable by Python, along
    with any needed imports to make that string work.
    More advanced than repr() as it can encode things
    like datetime.datetime.now.

    Returns a ``(string, imports)`` pair, where ``imports`` is a set of
    import statements that must be emitted for ``string`` to evaluate.
    Raises ValueError for values it cannot serialize (lambdas, local
    functions, and anything falling through to the final branch).
    """
    # FIXME: Ideally Promise would be reconstructible, but for now we
    # use force_text on them and defer to the normal string serialization
    # process.
    if isinstance(value, Promise):
        value = force_text(value)

    # Sequences: serialize each element recursively, merging imports.
    if isinstance(value, (list, set, tuple)):
        imports = set()
        strings = []
        for item in value:
            item_string, item_imports = cls.serialize(item)
            imports.update(item_imports)
            strings.append(item_string)
        if isinstance(value, set):
            format = "set([%s])"
        elif isinstance(value, tuple):
            # When len(value)==0, the empty tuple should be serialized as
            # "()", not "(,)" because (,) is invalid Python syntax.
            format = "(%s)" if len(value) != 1 else "(%s,)"
        else:
            format = "[%s]"
        return format % (", ".join(strings)), imports
    # Dictionaries: keys and values are serialized recursively.
    elif isinstance(value, dict):
        imports = set()
        strings = []
        for k, v in value.items():
            k_string, k_imports = cls.serialize(k)
            v_string, v_imports = cls.serialize(v)
            imports.update(k_imports)
            imports.update(v_imports)
            strings.append((k_string, v_string))
        return "{%s}" % (", ".join("%s: %s" % (k, v) for k, v in strings)), imports
    # Datetimes. Checked before dates: datetime.datetime is a subclass
    # of datetime.date, so the order of these branches matters.
    elif isinstance(value, datetime.datetime):
        value_repr = cls.serialize_datetime(value)
        imports = ["import datetime"]
        if value.tzinfo is not None:
            imports.append("from django.utils.timezone import utc")
        return value_repr, set(imports)
    # Dates
    elif isinstance(value, datetime.date):
        value_repr = repr(value)
        if isinstance(value, datetime_safe.date):
            # datetime_safe reprs omit the module prefix; re-add it.
            value_repr = "datetime.%s" % value_repr
        return value_repr, set(["import datetime"])
    # Times
    elif isinstance(value, datetime.time):
        value_repr = repr(value)
        if isinstance(value, datetime_safe.time):
            value_repr = "datetime.%s" % value_repr
        return value_repr, {"import datetime"}
    # Settings references
    elif isinstance(value, SettingsReference):
        return "settings.%s" % value.setting_name, set(["from django.conf import settings"])
    # Simple types
    elif isinstance(value, float):
        # nan/inf have no literal syntax; round-trip through float(str).
        if math.isnan(value) or math.isinf(value):
            return 'float("{}")'.format(value), set()
        return repr(value), set()
    elif isinstance(value, six.integer_types + (bool, type(None))):
        return repr(value), set()
    elif isinstance(value, six.binary_type):
        value_repr = repr(value)
        if six.PY2:
            # Prepend the `b` prefix since we're importing unicode_literals
            value_repr = 'b' + value_repr
        return value_repr, set()
    elif isinstance(value, six.text_type):
        value_repr = repr(value)
        if six.PY2:
            # Strip the `u` prefix since we're importing unicode_literals
            value_repr = value_repr[1:]
        return value_repr, set()
    # Decimal
    elif isinstance(value, decimal.Decimal):
        return repr(value), set(["from decimal import Decimal"])
    # Django fields
    elif isinstance(value, models.Field):
        attr_name, path, args, kwargs = value.deconstruct()
        return cls.serialize_deconstructed(path, args, kwargs)
    # Classes
    elif isinstance(value, type):
        special_cases = [
            (models.Model, "models.Model", []),
        ]
        for case, string, imports in special_cases:
            if case is value:
                return string, set(imports)
        # NOTE(review): a type with no __module__ attribute falls
        # through this branch and the method implicitly returns None --
        # confirm whether that can occur in practice.
        if hasattr(value, "__module__"):
            module = value.__module__
            if module == six.moves.builtins.__name__:
                # Builtins (int, str, ...) need no import.
                return value.__name__, set()
            else:
                return "%s.%s" % (module, value.__name__), {"import %s" % module}
    # Anything that knows how to deconstruct itself.
    elif hasattr(value, 'deconstruct'):
        return cls.serialize_deconstructed(*value.deconstruct())
    # Functions
    elif isinstance(value, (types.FunctionType, types.BuiltinFunctionType)):
        # @classmethod?
        if getattr(value, "__self__", None) and isinstance(value.__self__, type):
            klass = value.__self__
            module = klass.__module__
            return "%s.%s.%s" % (module, klass.__name__, value.__name__), set(["import %s" % module])
        # Further error checking
        if value.__name__ == '<lambda>':
            raise ValueError("Cannot serialize function: lambda")
        if value.__module__ is None:
            raise ValueError("Cannot serialize function %r: No module" % value)
        # Python 3 is a lot easier, and only uses this branch if it's not local.
        if getattr(value, "__qualname__", None) and getattr(value, "__module__", None):
            if "<" not in value.__qualname__:  # Qualname can include <locals>
                return "%s.%s" % (value.__module__, value.__qualname__), set(["import %s" % value.__module__])
        # Python 2/fallback version
        module_name = value.__module__
        # Make sure it's actually there and not an unbound method
        module = import_module(module_name)
        if not hasattr(module, value.__name__):
            raise ValueError(
                "Could not find function %s in %s.\nPlease note that "
                "due to Python 2 limitations, you cannot serialize "
                "unbound method functions (e.g. a method declared\n"
                "and used in the same class body). Please move the "
                "function into the main module body to use migrations.\n"
                "For more information, see https://docs.djangoproject.com/en/1.7/topics/migrations/#serializing-values"
                % (value.__name__, module_name))
        return "%s.%s" % (module_name, value.__name__), set(["import %s" % module_name])
    # Other iterables (generators etc.): serialized as a tuple literal.
    elif isinstance(value, collections.Iterable):
        imports = set()
        strings = []
        for item in value:
            item_string, item_imports = cls.serialize(item)
            imports.update(item_imports)
            strings.append(item_string)
        # When len(strings)==0, the empty iterable should be serialized as
        # "()", not "(,)" because (,) is invalid Python syntax.
        format = "(%s)" if len(strings) != 1 else "(%s,)"
        return format % (", ".join(strings)), imports
    # Compiled regex
    elif isinstance(value, COMPILED_REGEX_TYPE):
        imports = set(["import re"])
        regex_pattern, pattern_imports = cls.serialize(value.pattern)
        regex_flags, flag_imports = cls.serialize(value.flags)
        imports.update(pattern_imports)
        imports.update(flag_imports)
        args = [regex_pattern]
        if value.flags:
            args.append(regex_flags)
        return "re.compile(%s)" % ', '.join(args), imports
    # Uh oh.
    else:
        raise ValueError(
            "Cannot serialize: %r\nThere are some values Django cannot serialize into "
            "migration files.\nFor more, see https://docs.djangoproject.com/en/dev/"
            "topics/migrations/#migration-serializing" % value
        )
0
Example 43
Project: sharpy Source File: parser_tests.py
def test_paypal_customer_parse(self):
    """A PayPal customer XML document parses into the expected nested
    customer -> subscription -> invoices/items/plans structure."""
    customers_xml = self.load_file('paypal_customer.xml')
    parser = CustomersParser()
    expected = [
        {'campaign_content': '',
         'campaign_medium': '',
         'campaign_name': '',
         'campaign_source': '',
         'campaign_term': '',
         'code': 'test',
         'company': '',
         'created_datetime': datetime(2011, 5, 16, 16, 36, 1, tzinfo=tzutc()),
         'email': '[email protected]',
         'first_contact_datetime': None,
         'first_name': 'Test',
         'gateway_token': None,
         'id': '95d7696a-7fda-11e0-a51b-40403c39f8d9',
         'is_vat_exempt': '0',
         'last_name': 'User',
         'meta_data': [],
         'modified_datetime': datetime(2011, 5, 16, 16, 36, 1, tzinfo=tzutc()),
         'notes': '',
         'referer': '',
         'referer_host': '',
         'subscriptions': [
             {'cancel_reason': 'PayPal preapproval is pending',
              'cancel_type': 'paypal-wait',
              'canceled_datetime': datetime(2011, 5, 16, 16, 36, 1, tzinfo=tzutc()),
              'cc_address': '',
              'cc_city': '',
              'cc_company': '',
              'cc_country': '',
              'cc_email': '',
              'cc_expiration_date': '2012-05-16T00:00:00+00:00',
              'cc_first_name': 'Test',
              'cc_last_four': '',
              'cc_last_name': 'User',
              'cc_state': '',
              'cc_type': '',
              'cc_zip': '',
              'created_datetime': datetime(2011, 5, 16, 16, 36, 1, tzinfo=tzutc()),
              'gateway_account': {'gateway': 'PayPal_Simulator',
                                  'id': '303f9a50-7fda-11e0-a51b-40403c39f8d9',
                                  'type': 'paypal'},
              'gateway_token': 'SIMULATED-4dd152718371a',
              'id': '95d804ba-7fda-11e0-a51b-40403c39f8d9',
              'invoices': [
                  {'billing_datetime': datetime(2011, 6, 16, 16, 36, 1, tzinfo=tzutc()),
                   'charges': [
                       {'code': 'PAID_MONTHLY_RECURRING',
                        'created_datetime': datetime(2011, 6, 16, 16, 36, 1, tzinfo=tzutc()),
                        'description': '',
                        'each_amount': Decimal('20.00'),
                        'id': '',
                        'quantity': Decimal('1'),
                        'type': 'recurring'}],
                   'created_datetime': datetime(2011, 5, 16, 16, 36, 1, tzinfo=tzutc()),
                   'id': '95de499c-7fda-11e0-a51b-40403c39f8d9',
                   'number': '1',
                   'paid_transaction_id': '',
                   'type': 'subscription',
                   'vat_rate': ''}],
              'items': [
                  {'code': 'MONTHLY_ITEM',
                   'created_datetime': None,
                   'id': 'd19b4970-6e5a-102e-b098-40402145ee8b',
                   'modified_datetime': None,
                   'name': 'Monthly Item',
                   'quantity': Decimal('0')},
                  {'code': 'ONCE_ITEM',
                   'created_datetime': None,
                   'id': 'd19ef2f0-6e5a-102e-b098-40402145ee8b',
                   'modified_datetime': None,
                   'name': 'Once Item',
                   'quantity': Decimal('0')}],
              'plans': [
                  {'billing_frequency': 'monthly',
                   'billing_frequency_per': 'month',
                   'billing_frequency_quantity': 1,
                   'billing_frequency_unit': 'months',
                   'code': 'PAID_MONTHLY',
                   'created_datetime': datetime(2011, 1, 7, 21, 5, 42, tzinfo=tzutc()),
                   'description': '',
                   'id': '11af9cfc-6bf2-102e-b098-40402145ee8b',
                   'initial_bill_count': 1,
                   'initial_bill_count_unit': 'months',
                   'is_active': True,
                   'is_free': False,
                   'items': [
                       {'code': 'MONTHLY_ITEM',
                        'created_datetime': datetime(2011, 1, 10, 22, 40, 34, tzinfo=tzutc()),
                        'id': 'd19b4970-6e5a-102e-b098-40402145ee8b',
                        'is_periodic': False,
                        'name': 'Monthly Item',
                        'overage_amount': Decimal('0.00'),
                        'quantity_included': Decimal('0')},
                       {'code': 'ONCE_ITEM',
                        'created_datetime': datetime(2011, 1, 10, 22, 40, 34, tzinfo=tzutc()),
                        'id': 'd19ef2f0-6e5a-102e-b098-40402145ee8b',
                        'is_periodic': False,
                        'name': 'Once Item',
                        'overage_amount': Decimal('0.00'),
                        'quantity_included': Decimal('0')}],
                   'name': 'Paid Monthly',
                   'recurring_charge_amount': Decimal('20.00'),
                   'recurring_charge_code': 'PAID_MONTHLY_RECURRING',
                   'setup_charge_amount': Decimal('0.00'),
                   'setup_charge_code': '',
                   'trial_days': 0}],
              'redirect_url': 'https://cheddargetter.com/service/paypal/simulate/productId/2ccbecd6-6beb-102e-b098-40402145ee8b/id/95d7696a-7fda-11e0-a51b-40403c39f8d9?preapprovalkey=SIMULATED-4dd152718371a'}],
         'vat_number': ''}]
    result = parser.parse_xml(customers_xml)
    # FIX: assertEquals is a deprecated alias of assertEqual; the
    # leftover pprint debugging output has also been removed.
    self.assertEqual(expected, result)
0
Example 44
Project: git-pylint-commit-hook Source File: commit_hook.py
def check_repo(
        limit, pylint='pylint', pylintrc=None, pylint_params='',
        suppress_report=False, always_show_violations=False, ignored_files=None):
    """ Main function doing the checks

    :type limit: float
    :param limit: Minimum score to pass the commit
    :type pylint: str
    :param pylint: Path to pylint executable
    :type pylintrc: str
    :param pylintrc: Path to pylintrc file
    :type pylint_params: str
    :param pylint_params: Custom pylint parameters to add to the pylint command
    :type suppress_report: bool
    :param suppress_report: Suppress report if score is below limit
    :type always_show_violations: bool
    :param always_show_violations: Show violations in case of pass as well
    :type ignored_files: list
    :param ignored_files: List of files to exclude from the validation
    :rtype: bool
    :returns: True if all checked files passed (or were ignored)
    """
    # Lists are mutable and should not be assigned in function arguments
    if ignored_files is None:
        ignored_files = []

    # List of checked files and their results
    python_files = []

    # Set the exit code
    all_filed_passed = True

    if pylintrc is None:
        # If no config is found, use the old default '.pylintrc'
        pylintrc = pylint_config.find_pylintrc() or '.pylintrc'

    # Stash any unstaged changes while we look at the tree
    with _stash_unstaged():
        # Find Python files
        for filename in _get_list_of_committed_files():
            try:
                if _is_python_file(filename) and \
                        not _is_ignored(filename, ignored_files):
                    python_files.append((filename, None))
            except IOError:
                print('File not found (probably deleted): {}\t\tSKIPPED'.format(
                    filename))

        # Don't do anything if there are no Python files
        if len(python_files) == 0:
            sys.exit(0)

        # Load any pre-commit-hooks options from a .pylintrc file (if there is one)
        if os.path.exists(pylintrc):
            # BUG FIX: configparser.SafeConfigParser was deprecated in
            # Python 3.2 and removed in 3.12; ConfigParser is the
            # drop-in replacement.
            conf = configparser.ConfigParser()
            conf.read(pylintrc)
            if conf.has_option('pre-commit-hook', 'command'):
                pylint = conf.get('pre-commit-hook', 'command')
            if conf.has_option('pre-commit-hook', 'params'):
                pylint_params += ' ' + conf.get('pre-commit-hook', 'params')
            if conf.has_option('pre-commit-hook', 'limit'):
                limit = float(conf.get('pre-commit-hook', 'limit'))

        # Pylint Python files
        # IDIOM: enumerate replaces the manually-bumped `i` counter,
        # which previously had to be incremented in two separate places.
        for i, (python_file, score) in enumerate(python_files, start=1):
            # Allow __init__.py files to be completely empty
            if os.path.basename(python_file) == '__init__.py':
                if os.stat(python_file).st_size == 0:
                    print(
                        'Skipping pylint on {} (empty __init__.py)..'
                        '\tSKIPPED'.format(python_file))
                    continue

            # Start pylinting
            sys.stdout.write("Running pylint on {} (file {}/{})..\t".format(
                python_file, i, len(python_files)))
            sys.stdout.flush()
            try:
                command = [pylint]
                if pylint_params:
                    command += pylint_params.split()
                    if '--rcfile' not in pylint_params:
                        command.append('--rcfile={}'.format(pylintrc))
                else:
                    command.append('--rcfile={}'.format(pylintrc))
                command.append(python_file)
                proc = subprocess.Popen(
                    command,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE)
                out, _ = proc.communicate()
            except OSError:
                print("\nAn error occurred. Is pylint installed?")
                sys.exit(1)

            # Verify the score
            score = _parse_score(out)
            ignored = _check_ignore(out)
            if ignored or score >= float(limit):
                status = 'PASSED'
            else:
                status = 'FAILED'
                all_filed_passed = False

            # Add some output
            # BUG FIX: '{:.2}' means two *significant* digits, so a
            # perfect score of 10.0 was rendered as '1E+1'; '{:.2f}'
            # gives two decimal places as intended.
            print('{:.2f}/10.00\t{}{}'.format(
                decimal.Decimal(score),
                status,
                ignored and '\tIGNORED' or ''))

            status_check_list = ['FAILED']
            if always_show_violations:
                status_check_list.append('PASSED')

            # Re-run pylint to show the full report/violations.
            if status in status_check_list:
                if suppress_report:
                    command.append('--reports=n')
                proc = subprocess.Popen(
                    command,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE)
                out, _ = proc.communicate()
                print(_futurize_str(out))

    return all_filed_passed
0
Example 45
Project: fe Source File: stdlib_decimal.py
def numeric_pack(x,
        numeric_digit_length : "number of decimal digits in a numeric digit" = 4,
        get0 = itemgetter(0),
        get1 = itemgetter(1),
        Decimal = Decimal,
        pack = lib.numeric_pack
    ):
    """Pack ``x`` (a Decimal, or anything Decimal() accepts) into the
    PostgreSQL NUMERIC binary wire format via ``lib.numeric_pack``.

    Each wire "digit" holds ``numeric_digit_length`` (4) decimal digits,
    i.e. a base-10000 digit; the header tuple carries (ndigits, weight,
    sign, dscale) and the body is the digit list.

    NOTE(review): get0/get1/Decimal/pack look like default-argument
    bindings made at def time for fast local lookup -- confirm before
    overriding them.
    """
    if not isinstance(x, Decimal):
        x = Decimal(x)
    x = x.as_tuple()  # DecimalTuple(sign, digits, exponent)
    if x.exponent == 'F':
        # as_tuple() reports infinities with exponent 'F'.
        raise ValueError("numeric does not support infinite values")

    # normalize trailing zeros (truncate em')
    # this is important in order to get the weight and padding correct
    # and to avoid packing superfluous data which will make pg angry.
    trailing_zeros = 0
    weight = 0
    if x.exponent < 0:
        # only attempt to truncate if there are digits after the point,
        ##
        # Walk backwards over the fractional digits, counting zeros.
        for i in range(-1, max(-len(x.digits), x.exponent)-1, -1):
            if x.digits[i] != 0:
                break
            trailing_zeros += 1
        # truncate trailing zeros right of the decimal point
        # this *is* the case as exponent < 0.
        if trailing_zeros:
            digits = x.digits[:-trailing_zeros]
        else:
            digits = x.digits
        # the entire exponent is just trailing zeros(zero-weight).
        rdigits = -(x.exponent + trailing_zeros)   # digits right of the point
        ldigits = len(digits) - rdigits            # digits left of the point
        rpad = rdigits % numeric_digit_length
        if rpad:
            rpad = numeric_digit_length - rpad
    else:
        # Need the weight to be divisible by four,
        # so append zeros onto digits until it is.
        r = (x.exponent % numeric_digit_length)
        if x.exponent and r:
            digits = x.digits + ((0,) * r)
            weight = (x.exponent - r)
        else:
            digits = x.digits
            weight = x.exponent
        # The exponent is not evenly divisible by four, so
        # the weight can't simple be x.exponent as it doesn't
        # match the size of the numeric digit.
        ldigits = len(digits)
        # no fractional quantity.
        rdigits = 0
        rpad = 0

    # Left-pad to a whole number of wire digits; weight is expressed in
    # decimal digits here and converted to wire digits in the header.
    lpad = ldigits % numeric_digit_length
    if lpad:
        lpad = numeric_digit_length - lpad
    weight += (ldigits + lpad)

    # Group the (padded) decimal digits four at a time; each group is
    # later summed into a single base-10000 wire digit.
    digit_groups = map(
        get1,
        groupby(
            zip(
                # group by NUMERIC digit size,
                # every four digits make up a NUMERIC digit
                cycle((0,) * numeric_digit_length + (1,) * numeric_digit_length),
                # multiply each digit appropriately
                # for the eventual sum() into a NUMERIC digit
                starmap(
                    mul,
                    zip(
                        # pad with leading zeros to make
                        # the cardinality of the digit sequence
                        # to be evenly divisible by four,
                        # the NUMERIC digit size.
                        chain(
                            repeat(0, lpad),
                            digits,
                            repeat(0, rpad),
                        ),
                        cycle([10**x for x in range(numeric_digit_length-1, -1, -1)]),
                    )
                ),
            ),
            get0,
        ),
    )

    return pack((
        (
            (ldigits + rdigits + lpad + rpad) // numeric_digit_length, # ndigits
            (weight // numeric_digit_length) - 1, # NUMERIC weight
            numeric_negative if x.sign == 1 else x.sign, # sign
            - x.exponent if x.exponent < 0 else 0, # dscale
        ),
        list(map(sum, ([get1(y) for y in x] for x in digit_groups))),
    ))
0
Example 46
Project: coursys Source File: financials.py
@login_required
def financials(request, grad_slug, style='complete'):
    """Financial summary page for one grad student.

    For every semester between the student's earliest and latest known
    funding-related semester, builds a breakdown of TA, RA, scholarship
    and other funding plus promise tracking, and renders the
    'grad/view_financials-<style>.html' template.
    """
    if style not in STYLES:
        return NotFoundResponse(request)
    grad, _, units = _can_view_student(request, grad_slug, funding=True)
    if grad is None:
        return ForbiddenResponse(request)

    # Gather all funding-related records for this student up front.
    current_status = GradStatus.objects.filter(student=grad, hidden=False).order_by('-start')[0]
    grad_status_qs = GradStatus.objects.filter(student=grad, hidden=False, status__in=STATUS_ACTIVE).select_related('start','end')
    scholarships_qs = Scholarship.objects.filter(student=grad, removed=False).select_related('start_semester','end_semester')
    promises_qs = Promise.objects.filter(student=grad, removed=False).select_related('start_semester','end_semester')
    other_fundings = OtherFunding.objects.filter(student=grad, removed=False).select_related('semester')
    contracts = TAContract.objects.filter(application__person=grad.person).exclude(status__in=STATUSES_NOT_TAING).select_related('posting__semester')
    other_contracts = NewTAContract.objects.filter(person=grad.person, status__in=['NEW', 'SGN'])\
        .select_related('category')\
        .prefetch_related('course')
    appointments = RAAppointment.objects.filter(person=grad.person, deleted=False)
    program_history = GradProgramHistory.objects.filter(student=grad).select_related('start_semester', 'program')
    financial_comments = FinancialComment.objects.filter(student=grad, removed=False).select_related('semester')

    # initialize earliest starting and latest ending semesters for display.
    # Falls back on current semester if none
    all_semesters = itertools.chain(  # every semester we have info for
        (s.start for s in grad_status_qs),
        (s.end for s in grad_status_qs),
        (p.start_semester for p in promises_qs),
        (p.end_semester for p in promises_qs),
        (s.start_semester for s in scholarships_qs),
        (s.end_semester for s in scholarships_qs),
        (o.semester for o in other_fundings),
        (c.posting.semester for c in contracts),
        (c.semester for c in financial_comments),
        (get_semester(a.start_date) for a in appointments),
        (get_semester(a.end_date) for a in appointments),
        (ph.start_semester for ph in program_history),
    )
    # Drop None entries (e.g. open-ended statuses/scholarships).
    # NOTE(review): itertools.ifilter exists only on Python 2; under
    # Python 3 this would need the builtin filter() -- confirm runtime.
    all_semesters = itertools.ifilter(lambda x: isinstance(x, Semester), all_semesters)
    all_semesters = set(all_semesters)
    if len(all_semesters) == 0:
        all_semesters = [get_semester()]
    earliest_semester = min(all_semesters)
    latest_semester = max(all_semesters)

    semesters = []
    semesters_qs = Semester.objects.filter(start__gte=earliest_semester.start, end__lte=latest_semester.end).order_by('-start')
    current_acad_year = None
    # build data structure with funding for each semester
    for semester in semesters_qs:
        semester_total = decimal.Decimal(0)
        yearpos = (semester - grad.start_semester) % 3 # position in academic year: 0 is start of a new academic year for this student
        if not current_acad_year or yearpos == 2:
            # keep this (mutable) structure that we can alias in each semester and keep running totals
            current_acad_year = {'total': 0, 'semcount': 0, 'endsem': semester}

        # other funding
        other_funding = other_fundings.filter(semester=semester)
        other_total = 0
        for other in other_funding:
            if other.eligible:
                other_total += other.amount
                semester_total += other.amount

        # scholarships: amounts are spread evenly across the semesters
        # a scholarship covers.
        semester_scholarships = scholarships_qs.filter(start_semester__name__lte=semester.name, end_semester__name__gte=semester.name)
        semester_eligible_scholarships = semester_scholarships.filter(scholarship_type__eligible=True)
        scholarships = []
        scholarship_total = 0
        for ss in semester_scholarships:
            amt = ss.amount/(ss.end_semester-ss.start_semester+1)
            scholarship_total += amt
            scholarships.append({'scholarship': ss, 'semester_amount': amt})
        for semester_eligible_scholarship in semester_eligible_scholarships:
            if(semester_eligible_scholarship.start_semester != semester_eligible_scholarship.end_semester):
                semester_span = semester_eligible_scholarship.end_semester - semester_eligible_scholarship.start_semester + 1
                semester_total += semester_eligible_scholarship.amount/semester_span
            else:
                semester_total += semester_eligible_scholarship.amount

        # grad status in effect during this semester
        status = None
        status_short = None
        for s in GradStatus.objects.filter(student=grad, hidden=False):
            if s.start <= semester and (s.end == None or semester <= s.end) :
                status = s.get_status_display()
                status_short = s.get_short_status_display()

        # grad program change starting this semester, if any
        program = None
        for ph in program_history:
            if ph.start_semester == semester:
                program = ph

        # financial comments
        comments = []
        for c in financial_comments:
            if c.semester == semester:
                comments.append(c)

        # TAs: old-style postings plus new-style contracts.
        ta_total = 0
        courses = []
        for contract in contracts:
            if contract.posting.semester == semester:
                for course in TACourse.objects.filter(contract=contract).exclude(bu=0).select_related('course'):
                    ta_total += course.pay()
                    if contract.status == 'SGN':
                        text = "%s (%s BU)" % (course.course.name(), course.total_bu)
                    else:
                        text = "%s (%s BU, current status: %s)" \
                               % (course.course.name(), course.total_bu, contract.get_status_display().lower())
                    courses.append({'course': text,'amount': course.pay()})
        for contract in other_contracts:
            if contract.category.hiring_semester.semester == semester:
                # Only signed contracts count toward the total; drafts
                # are listed with a zero amount.
                if contract.status == 'SGN':
                    for course in contract.course.all():
                        ta_total += course.total
                        courses.append({'course': "%s (%s BU)" % (course.course.name(), course.total_bu),
                                        'amount': course.total })
                else:
                    for course in contract.course.all():
                        courses.append({'course': "%s (%s BU - $%.02f) - Draft" % (course.course.name(), course.total_bu, course.total),
                                        'amount': 0 })
        ta = {'courses':courses,'amount':ta_total}
        semester_total += ta_total

        # RAs: lump-sum pay spread evenly over the appointment length.
        ra_total = 0
        appt = []
        for appointment in appointments:
            app_start_sem = appointment.start_semester()
            app_end_sem = appointment.end_semester()
            length = appointment.semester_length()
            if app_start_sem <= semester and app_end_sem >= semester:
                sem_pay = appointment.lump_sum_pay/length
                ra_total += sem_pay
                appt.append({'desc':"RA for %s - %s" % (appointment.hiring_faculty.name(), appointment.project),
                             'amount':sem_pay, 'semesters': appointment.semester_length() })
        ra = {'appt':appt, 'amount':ra_total}
        semester_total += ra_total

        # promises (ending in this semester, so we display them in the right spot)
        try:
            promise = promises_qs.filter(end_semester=semester)[0]
        except IndexError:
            promise = None

        current_acad_year['total'] += semester_total
        current_acad_year['semcount'] += 1

        semester_data = {'semester':semester, 'status':status, 'status_short': status_short, 'scholarships': scholarships,
                         'promise': promise, 'semester_total': semester_total, 'comments': comments,
                         'ta': ta, 'ra': ra, 'other_funding': other_funding, 'program': program,
                         'other_total': other_total, 'scholarship_total': scholarship_total,
                         'ta_total': ta_total, 'ra_total': ra_total, 'acad_year': current_acad_year}
        semesters.append(semester_data)

    # Compare each promise against what was actually received over its
    # covered semesters and annotate the semester where it is shown.
    # NOTE(review): 'promises' is passed to the template empty -- the
    # promise info is attached to the semester dicts below; confirm the
    # template relies on that and not on this list.
    promises = []
    for promise in promises_qs:
        received = decimal.Decimal(0)
        for semester in semesters:
            if semester['semester'] < promise.start_semester or semester['semester'] > promise.end_semester:
                continue
            received += semester['semester_total']
        owing = received - promise.amount
        # minor logic for display.
        if owing < 0:
            owing = abs(owing)
        else:
            owing = -1
        # annotate the semester where we're displaying the promise with relevant info
        for semester in semesters:
            if semester['semester'] == promise.end_semester:
                semester['promisereceived'] = received
                semester['promiseowing'] = owing

    # Grand totals across all displayed semesters.
    totals = {'ta': 0, 'ra': 0, 'scholarship': 0, 'other': 0, 'total': 0}
    for s in semesters:
        totals['ta'] += s['ta_total']
        totals['ra'] += s['ra_total']
        totals['scholarship'] += s['scholarship_total']
        totals['other'] += s['other_total']
        totals['total'] += s['semester_total']

    context = {
        'semesters': semesters,
        'promises': promises,
        'grad':grad,
        'status': current_status,
        'unit': units,
        'totals': totals,
    }
    return render(request, 'grad/view_financials-%s.html' % (style), context)
0
Example 47
Project: coinbase-exchange-order-book Source File: strategies.py
def market_maker_strategy(open_orders, order_book, spreads):
    """Simple BTC-USD market-making loop.

    Keeps one 0.01 BTC post-only bid and one 0.01 BTC post-only ask
    resting near the top of the book, and cancels (so the next pass
    re-prices) any order that drifts too far from, or too close to,
    the opposite side of the book.  Runs forever.
    """
    time.sleep(10)
    open_orders.get_open_orders()
    open_orders.cancel_all()
    while True:
        time.sleep(0.005)

        # Sanity check: a crossed book means our market view is stale.
        if order_book.asks.price_tree.min_key() - order_book.bids.price_tree.max_key() < 0:
            file_logger.warn('Negative spread: {0}'.format(
                order_book.asks.price_tree.min_key() - order_book.bids.price_tree.max_key()))
            continue

        # Place a new bid if none is resting.
        if not open_orders.open_bid_order_id:
            open_bid_price = order_book.asks.price_tree.min_key() - spreads.bid_spread - open_orders.open_bid_rejections
            if 0.01 * float(open_bid_price) < float(open_orders.accounts['USD']['available']):
                order = {'size': '0.01',
                         'price': str(open_bid_price),
                         'side': 'buy',
                         'product_id': 'BTC-USD',
                         'post_only': True}
                response = requests.post(exchange_api_url + 'orders', json=order, auth=exchange_auth)
                # PERF FIX: Response.json() re-parses the body on every
                # call; parse once and reuse the result.
                result = response.json()
                if 'status' in result and result['status'] == 'pending':
                    open_orders.open_bid_order_id = result['id']
                    open_orders.open_bid_price = open_bid_price
                    open_orders.open_bid_rejections = Decimal('0.0')
                    file_logger.info('new bid @ {0}'.format(open_bid_price))
                elif 'status' in result and result['status'] == 'rejected':
                    # Post-only rejection: back the price off next time.
                    open_orders.open_bid_order_id = None
                    open_orders.open_bid_price = None
                    open_orders.open_bid_rejections += Decimal('0.04')
                    file_logger.warn('rejected: new bid @ {0}'.format(open_bid_price))
                elif 'message' in result and result['message'] == 'Insufficient funds':
                    open_orders.open_bid_order_id = None
                    open_orders.open_bid_price = None
                    file_logger.warn('Insufficient USD')
                else:
                    file_logger.error('Unhandled response: {0}'.format(pformat(result)))
                continue

        # Place a new ask if none is resting.
        if not open_orders.open_ask_order_id:
            open_ask_price = order_book.bids.price_tree.max_key() + spreads.ask_spread + open_orders.open_ask_rejections
            if 0.01 < float(open_orders.accounts['BTC']['available']):
                order = {'size': '0.01',
                         'price': str(open_ask_price),
                         'side': 'sell',
                         'product_id': 'BTC-USD',
                         'post_only': True}
                response = requests.post(exchange_api_url + 'orders', json=order, auth=exchange_auth)
                # PERF FIX: parse the response body once (see above).
                result = response.json()
                if 'status' in result and result['status'] == 'pending':
                    open_orders.open_ask_order_id = result['id']
                    open_orders.open_ask_price = open_ask_price
                    file_logger.info('new ask @ {0}'.format(open_ask_price))
                    open_orders.open_ask_rejections = Decimal('0.0')
                elif 'status' in result and result['status'] == 'rejected':
                    open_orders.open_ask_order_id = None
                    open_orders.open_ask_price = None
                    open_orders.open_ask_rejections += Decimal('0.04')
                    file_logger.warn('rejected: new ask @ {0}'.format(open_ask_price))
                elif 'message' in result and result['message'] == 'Insufficient funds':
                    open_orders.open_ask_order_id = None
                    open_orders.open_ask_price = None
                    file_logger.warn('Insufficient BTC')
                else:
                    file_logger.error('Unhandled response: {0}'.format(pformat(result)))
                continue

        # Cancel a resting bid that has drifted out of its band.
        if open_orders.open_bid_order_id and not open_orders.open_bid_cancelled:
            bid_too_far_out = open_orders.open_bid_price < (order_book.asks.price_tree.min_key()
                                                            - spreads.bid_too_far_adjustment_spread)
            bid_too_close = open_orders.open_bid_price > (order_book.bids.price_tree.max_key()
                                                          - spreads.bid_too_close_adjustment_spread)
            cancel_bid = bid_too_far_out or bid_too_close
            if cancel_bid:
                if bid_too_far_out:
                    file_logger.info('CANCEL: open bid {0} too far from best ask: {1} spread: {2}'.format(
                        open_orders.open_bid_price,
                        order_book.asks.price_tree.min_key(),
                        open_orders.open_bid_price - order_book.asks.price_tree.min_key()))
                if bid_too_close:
                    file_logger.info('CANCEL: open bid {0} too close to best bid: {1} spread: {2}'.format(
                        open_orders.open_bid_price,
                        order_book.bids.price_tree.max_key(),
                        open_orders.open_bid_price - order_book.bids.price_tree.max_key()))
                open_orders.cancel('bid')
                continue

        # Cancel a resting ask that has drifted out of its band.
        if open_orders.open_ask_order_id and not open_orders.open_ask_cancelled:
            ask_too_far_out = open_orders.open_ask_price > (order_book.bids.price_tree.max_key() +
                                                            spreads.ask_too_far_adjustment_spread)
            ask_too_close = open_orders.open_ask_price < (order_book.asks.price_tree.min_key() -
                                                          spreads.ask_too_close_adjustment_spread)
            cancel_ask = ask_too_far_out or ask_too_close
            if cancel_ask:
                if ask_too_far_out:
                    file_logger.info('CANCEL: open ask {0} too far from best bid: {1} spread: {2}'.format(
                        open_orders.open_ask_price,
                        order_book.bids.price_tree.max_key(),
                        open_orders.open_ask_price - order_book.bids.price_tree.max_key()))
                if ask_too_close:
                    file_logger.info('CANCEL: open ask {0} too close to best ask: {1} spread: {2}'.format(
                        open_orders.open_ask_price,
                        order_book.asks.price_tree.min_key(),
                        open_orders.open_ask_price - order_book.asks.price_tree.min_key()))
                open_orders.cancel('ask')
                continue
0
Example 48
Project: GAE-Bulk-Mailer Source File: defaultfilters.py
@register.filter(is_safe=True)
def floatformat(text, arg=-1):
    """
    Displays a float to a specified number of decimal places.
    If called without an argument, it displays the floating point number with
    one decimal place -- but only if there's a decimal place to be displayed:
    * num1 = 34.23234
    * num2 = 34.00000
    * num3 = 34.26000
    * {{ num1|floatformat }} displays "34.2"
    * {{ num2|floatformat }} displays "34"
    * {{ num3|floatformat }} displays "34.3"
    If arg is positive, it will always display exactly arg number of decimal
    places:
    * {{ num1|floatformat:3 }} displays "34.232"
    * {{ num2|floatformat:3 }} displays "34.000"
    * {{ num3|floatformat:3 }} displays "34.260"
    If arg is negative, it will display arg number of decimal places -- but
    only if there are places to be displayed:
    * {{ num1|floatformat:"-3" }} displays "34.232"
    * {{ num2|floatformat:"-3" }} displays "34"
    * {{ num3|floatformat:"-3" }} displays "34.260"
    If the input float is infinity or NaN, the (platform-dependent) string
    representation of that value will be displayed.
    """
    try:
        input_val = force_text(text)
        d = Decimal(input_val)
    except UnicodeEncodeError:
        return ''
    except InvalidOperation:
        # The text form wasn't a valid Decimal literal.  Pass through the
        # special float strings (inf/nan etc.) untouched; otherwise fall back
        # to float() parsing, which accepts a wider range of inputs.
        if input_val in special_floats:
            return input_val
        try:
            d = Decimal(force_text(float(text)))
        except (ValueError, InvalidOperation, TypeError, UnicodeEncodeError):
            return ''
    try:
        p = int(arg)
    except ValueError:
        # Non-integer arg: return the input unformatted.
        return input_val
    try:
        # m is the (negated) fractional part; falsy iff d is a whole number.
        m = int(d) - d
    except (ValueError, OverflowError, InvalidOperation):
        return input_val
    if not m and p < 0:
        # Whole number and negative precision: display with no decimals.
        return mark_safe(formats.number_format('%d' % (int(d)), 0))
    if p == 0:
        exp = Decimal(1)
    else:
        # Quantization exponent, e.g. p=3 -> Decimal('0.001').
        exp = Decimal('1.0') / (Decimal(10) ** abs(p))
    try:
        # Set the precision high enough to avoid an exception, see #15789.
        tupl = d.as_tuple()
        units = len(tupl[1]) - tupl[2]
        prec = abs(p) + units + 1
        # Avoid conversion to scientific notation by accessing `sign`, `digits`
        # and `exponent` from `Decimal.as_tuple()` directly.
        sign, digits, exponent = d.quantize(exp, ROUND_HALF_UP,
            Context(prec=prec)).as_tuple()
        # Rebuild the digit string manually (least-significant first), padding
        # with zeros so the decimal point can be inserted at -exponent.
        digits = [six.text_type(digit) for digit in reversed(digits)]
        while len(digits) <= abs(exponent):
            digits.append('0')
        digits.insert(-exponent, '.')
        if sign:
            digits.append('-')
        number = ''.join(reversed(digits))
        return mark_safe(formats.number_format(number, abs(p)))
    except InvalidOperation:
        return input_val
Example 49 (score: 0)
Project: pretix — Source File: cart.py
def _add_new_items(event: Event, items: List[dict],
                   cart_id: str, expiry: datetime, now_dt: datetime) -> Optional[str]:
    """
    Validate the requested items and create ``CartPosition`` rows for them.

    Each dict in ``items`` is read with the keys ``'item'``, ``'variation'``
    and ``'count'``, and optionally ``'voucher'``, ``'price'`` and ``'cp'``
    (an existing ``CartPosition`` being re-created).

    Error protocol: voucher problems and over-priced custom prices abort the
    whole call immediately (``return error_messages[...]``); availability
    problems only record the first error in ``err`` and continue with the
    remaining items.  Returns that first soft error, or None on full success.
    """
    err = None
    # Fetch items from the database
    items_query = Item.objects.filter(event=event, id__in=[i['item'] for i in items]).prefetch_related(
        "quotas")
    items_cache = {i.id: i for i in items_query}
    variations_query = ItemVariation.objects.filter(
        item__event=event,
        id__in=[i['variation'] for i in items if i['variation'] is not None]
    ).select_related("item", "item__event").prefetch_related("quotas")
    variations_cache = {v.id: v for v in variations_query}
    for i in items:
        # Check whether the specified items are part of what we just fetched from the database
        # If they are not, the user supplied item IDs which either do not exist or belong to
        # a different event
        if i['item'] not in items_cache or (i['variation'] is not None and i['variation'] not in variations_cache):
            err = err or error_messages['not_for_sale']
            continue
        item = items_cache[i['item']]
        variation = variations_cache[i['variation']] if i['variation'] is not None else None
        # Check whether a voucher has been provided
        voucher = None
        if i.get('voucher'):
            try:
                voucher = Voucher.objects.get(code=i.get('voucher').strip(), event=event)
                if voucher.redeemed:
                    return error_messages['voucher_redeemed']
                if voucher.valid_until is not None and voucher.valid_until < now_dt:
                    return error_messages['voucher_expired']
                if not voucher.applies_to(item, variation):
                    return error_messages['voucher_invalid_item']
                # Reject using the same voucher twice within one cart; when
                # re-creating an existing position, exclude that position itself.
                doubleuse = CartPosition.objects.filter(voucher=voucher, cart_id=cart_id, event=event)
                if 'cp' in i:
                    doubleuse = doubleuse.exclude(pk=i['cp'].pk)
                if doubleuse.exists():
                    return error_messages['voucher_double']
            except Voucher.DoesNotExist:
                return error_messages['voucher_invalid']
        # Fetch all quotas. If there are no quotas, this item is not allowed to be sold.
        quotas = list(item.quotas.all()) if variation is None else list(variation.quotas.all())
        # A quota-restricted voucher must match one of the item's quotas.
        if voucher and voucher.quota and voucher.quota.pk not in [q.pk for q in quotas]:
            return error_messages['voucher_invalid_item']
        if item.require_voucher and voucher is None:
            return error_messages['voucher_required']
        if item.hide_without_voucher and (voucher is None or voucher.item is None or voucher.item.pk != item.pk):
            return error_messages['voucher_required']
        if len(quotas) == 0 or not item.is_available() or (variation and not variation.active):
            err = err or error_messages['unavailable']
            continue
        # Check that all quotas allow us to buy i['count'] instances of the object
        # (skipped when the voucher ignores or blocks quota).
        quota_ok = i['count']
        if not voucher or (not voucher.allow_ignore_quota and not voucher.block_quota):
            for quota in quotas:
                # avail is (status, remaining-or-None) — presumably per
                # Quota.availability(); verify against its definition.
                avail = quota.availability()
                if avail[1] is not None and avail[1] < i['count']:
                    # This quota is not available or less than i['count'] items are left, so we have to
                    # reduce the number of bought items
                    if avail[0] != Quota.AVAILABILITY_OK:
                        err = err or error_messages['unavailable']
                    else:
                        err = err or error_messages['in_part']
                    quota_ok = min(quota_ok, avail[1])
        # Price resolution: voucher price > variation default > item default.
        if voucher and voucher.price is not None:
            price = voucher.price
        else:
            price = item.default_price if variation is None else (
                variation.default_price if variation.default_price is not None else item.default_price)
        # Free-price items may raise (never lower) the price via a custom value.
        if item.free_price and 'price' in i and i['price'] is not None and i['price'] != "":
            custom_price = i['price']
            if not isinstance(custom_price, Decimal):
                # Accept comma as decimal separator (e.g. German locale input).
                custom_price = Decimal(custom_price.replace(",", "."))
            if custom_price > 100000000:
                return error_messages['price_too_high']
            price = max(custom_price, price)
        # Create a CartPosition for as much items as we can
        for k in range(quota_ok):
            if 'cp' in i and i['count'] == 1:
                # Recreating
                cp = i['cp']
                cp.expires = expiry
                cp.price = price
                cp.save()
            else:
                CartPosition.objects.create(
                    event=event, item=item, variation=variation,
                    price=price,
                    expires=expiry,
                    cart_id=cart_id, voucher=voucher
                )
    return err
Example 50 (score: 0)
def get_fixtures(n=None):
    """
    Yield `n` dictionaries of `Person` field values.

    If `n` is not specified it defaults to the number of base fixtures (6).
    If `n` exceeds the base count, the fixtures are cycled, so duplicate
    dictionaries are yielded.
    """
    _now = timezone.now().replace(microsecond=0)  # mysql doesn't do microseconds. # NOQA
    _date = date(2015, 3, 28)
    _time = time(13, 0)
    fixtures = [
        {
            'big_age': 59999999999999999, 'comma_separated_age': '1,2,3',
            'age': -99, 'positive_age': 9999, 'positive_small_age': 299,
            'small_age': -299, 'certified': False, 'null_certified': None,
            'name': 'Mike', 'email': '[email protected]',
            'file_path': '/Users/user/fixtures.json', 'slug': 'mike',
            'text': 'here is a dummy text',
            'url': 'https://docs.djangoproject.com',
            'height': Decimal('1.81'), 'date_time': _now,
            'date': _date, 'time': _time, 'float_height': 0.3,
            'remote_addr': '192.0.2.30', 'my_file': 'dummy.txt',
            'image': 'kitten.jpg', 'data': {'name': 'Mike', 'age': -99},
        },
        {
            'big_age': 245999992349999, 'comma_separated_age': '6,2,9',
            'age': 25, 'positive_age': 49999, 'positive_small_age': 315,
            'small_age': 5409, 'certified': False, 'null_certified': True,
            'name': 'Pete', 'email': '[email protected]',
            'file_path': 'users.json', 'slug': 'pete', 'text': 'dummy',
            'url': 'https://google.com', 'height': Decimal('1.93'),
            'date_time': _now, 'date': _date, 'time': _time,
            'float_height': 0.5, 'remote_addr': '127.0.0.1',
            'my_file': 'fixtures.json',
            'data': [{'name': 'Pete'}, {'name': 'Mike'}],
        },
        {
            'big_age': 9929992349999, 'comma_separated_age': '6,2,9,10,5',
            'age': 29, 'positive_age': 412399, 'positive_small_age': 23315,
            'small_age': -5409, 'certified': False, 'null_certified': True,
            'name': 'Ash', 'email': '[email protected]',
            'file_path': '/Downloads/kitten.jpg', 'slug': 'ash',
            'text': 'bla bla bla', 'url': 'news.ycombinator.com',
            'height': Decimal('1.78'), 'date_time': _now,
            'date': _date, 'time': _time,
            'float_height': 0.8, 'my_file': 'dummy.png',
            'data': {'text': 'bla bla bla', 'names': ['Mike', 'Pete']},
        },
        {
            'big_age': 9992349234, 'comma_separated_age': '12,29,10,5',
            'age': -29, 'positive_age': 4199, 'positive_small_age': 115,
            'small_age': 909, 'certified': True, 'null_certified': False,
            'name': 'Mary', 'email': '[email protected]',
            'file_path': 'dummy.png', 'slug': 'mary',
            'text': 'bla bla bla bla bla', 'url': 'news.ycombinator.com',
            'height': Decimal('1.65'), 'date_time': _now,
            'date': _date, 'time': _time, 'float_height': 0,
            'remote_addr': '2a02:42fe::4',
            'data': {'names': {'name': 'Mary'}},
        },
        {
            'big_age': 999234, 'comma_separated_age': '12,1,30,50',
            'age': 1, 'positive_age': 99199, 'positive_small_age': 5,
            'small_age': -909, 'certified': False, 'null_certified': False,
            'name': 'Sandra', 'email': '[email protected]',
            'file_path': '/home/dummy.png', 'slug': 'sandra',
            'text': 'this is a dummy text', 'url': 'google.com',
            'height': Decimal('1.59'), 'date_time': _now,
            'date': _date, 'time': _time, 'float_height': 2 ** 2,
            'image': 'dummy.jpeg', 'data': {},
        },
        {
            'big_age': 9999999999, 'comma_separated_age': '1,100,3,5',
            'age': 35, 'positive_age': 1111, 'positive_small_age': 500,
            'small_age': 110, 'certified': True, 'null_certified': None,
            'name': 'Crystal', 'email': '[email protected]',
            'file_path': '/home/dummy.txt', 'slug': 'crystal',
            'text': 'dummy text', 'url': 'docs.djangoproject.com',
            'height': Decimal('1.71'), 'date_time': _now,
            'date': _date, 'time': _time, 'float_height': 2 ** 10,
            'image': 'dummy.png', 'data': [],
        },
    ]
    n = n or len(fixtures)
    fixtures = itertools.cycle(fixtures)
    # BUGFIX: the original used `xrange`, which is Python-2-only and raises
    # NameError on Python 3; `range` behaves identically here on both.
    for _ in range(n):
        yield next(fixtures)