Here are examples of the Python API django.core.cache.cache.get, taken from open-source projects. By voting up you can indicate which examples are most useful and appropriate.
437 Examples
3
Source : backends.py
with MIT License
from a1401358759
with MIT License
from a1401358759
def get_articles(key):
    """Return published articles, served from the cache under *key* when warm."""
    # Single cache round-trip: the original probed cache.get(key) and then
    # read it again, doubling backend traffic and racing against expiry
    # between the two calls.
    articles = cache.get(key)
    if not articles:
        articles = Article.objects.filter(status=BlogStatus.PUBLISHED).order_by("-publish_time")
        if articles:
            cache.set(key, articles, None)  # None timeout: cache indefinitely
    return articles
def get_tags_and_musics(tag_key, music_key):
3
Source : backends.py
with MIT License
from a1401358759
with MIT License
from a1401358759
def get_popular_top10_blogs(key):
    """Return the most-viewed published articles, cached for 24 hours."""
    # One cache lookup instead of the original's probe-then-read pair,
    # which hit the backend twice and could race with expiry.
    new_post = cache.get(key)
    if not new_post:
        # NOTE(review): the name says top10 but only 8 rows are sliced — confirm intent.
        new_post = Article.objects.filter(status=BlogStatus.PUBLISHED).order_by('-count')[:8]
        if new_post:
            cache.set(key, list(new_post), 24 * 3600)  # cache for 24 hours
    return new_post
def get_classifications(key):
3
Source : backends.py
with MIT License
from a1401358759
with MIT License
from a1401358759
def get_classifications(key):
    """Return the classification list, cached indefinitely under *key*."""
    # Fetch once: the original called cache.get(key) twice (probe + re-read).
    classification = cache.get(key)
    if not classification:
        classification = Classification.class_list.get_classify_list()
        if classification:
            cache.set(key, classification, None)  # None timeout: never expires
    return classification
def get_date_list(key):
3
Source : backends.py
with MIT License
from a1401358759
with MIT License
from a1401358759
def get_date_list(key):
    """Return the article date list, cached for 24 hours under *key*."""
    # Fetch once: the original called cache.get(key) twice (probe + re-read).
    date_list = cache.get(key)
    if not date_list:
        date_list = Article.date_list.get_article_by_date()
        if date_list:
            cache.set(key, date_list, 24 * 3600)  # cache for 24 hours
    return date_list
def get_archieve(key):
3
Source : backends.py
with MIT License
from a1401358759
with MIT License
from a1401358759
def get_archieve(key):
    """Return the article archive, cached for 24 hours under *key*.

    The misspelled name ("archieve") is kept for caller compatibility.
    """
    # Fetch once: the original called cache.get(key) twice (probe + re-read).
    archieve = cache.get(key)
    if not archieve:
        archieve = Article.date_list.get_article_by_archive()
        if archieve:
            cache.set(key, archieve, 24 * 3600)  # cache for 24 hours
    return archieve
def get_links(key):
3
Source : backends.py
with MIT License
from a1401358759
with MIT License
from a1401358759
def get_links(key):
    """Return all Links rows as a list, cached indefinitely under *key*."""
    # Fetch once: the original called cache.get(key) twice (probe + re-read).
    links = cache.get(key)
    if not links:
        links = list(Links.objects.all())
        if links:
            cache.set(key, links, None)  # None timeout: never expires
    return links
def get_carousel_imgs(key, img_type):
3
Source : backends.py
with MIT License
from a1401358759
with MIT License
from a1401358759
def get_carousel_imgs(key, img_type):
    """Return carousel images of *img_type*, cached indefinitely under *key*."""
    # Fetch once: the original called cache.get(key) twice (probe + re-read).
    carouse_imgs = cache.get(key)
    if not carouse_imgs:
        carouse_imgs = CarouselImg.objects.filter(img_type=img_type).order_by("-weights", "id")
        if carouse_imgs:
            cache.set(key, carouse_imgs, None)  # None timeout: never expires
    return carouse_imgs
def get_cache_comments(key):
3
Source : backends.py
with MIT License
from a1401358759
with MIT License
from a1401358759
def get_cache_comments(key):
    """Return comments whose target equals *key*, cached indefinitely."""
    # Fetch once: the original called cache.get(key) twice (probe + re-read).
    comments = cache.get(key)
    if not comments:
        comments = Comments.objects.select_related().filter(target=key).order_by('-id')
        if comments:
            cache.set(key, comments, None)  # None timeout: never expires
    return comments
def gravatar_url(email, size=40):
3
Source : admin.py
with MIT License
from abrookins
with MIT License
from abrookins
def goals_dashboard_view_redis(self, request):
    """Serve the goals dashboard, memoizing the rendered result for an hour."""
    key = redis_key_schema.admin_goals_dashboard()
    cached = cache.get(key)
    if cached:
        return cached
    # Cache miss: delegate to the SQL-backed view and remember the result.
    fresh = self.goals_dashboard_view_sql(request)
    cache.set(key, fresh, timeout=ONE_HOUR)
    return fresh
# end::caching-view-in-redis[]
# tag::aggregations[]
def goals_avg_completions_view(self, request):
3
Source : views.py
with GNU General Public License v3.0
from Archmonger
with GNU General Public License v3.0
from Archmonger
def generate_invite_code(request):
    """Hand out a fresh invite code that is not already reserved."""
    # Keep drawing random codes until an unused one turns up, then reserve
    # it in the cache for INVITE_CODE_DURATION seconds.
    while True:
        code = token_hex(12)
        key = "invite_code" + code
        if cache.get(key) is None:
            cache.set(key, True, INVITE_CODE_DURATION)
            return JsonResponse({"invite_code": code})
3
Source : admin.py
with GNU Lesser General Public License v3.0
from auto-mat
with GNU Lesser General Public License v3.0
from auto-mat
def job_status_info(self, obj):
    """Return the live job status from the cache, else the persisted one."""
    cached = cache.get(self.direction + "_job_status_%s" % obj.pk)
    return cached or obj.job_status
class ImportJobForm(forms.ModelForm):
3
Source : serializers.py
with GNU Affero General Public License v3.0
from avantifellows
with GNU Affero General Public License v3.0
from avantifellows
def to_representation(self, instance):
    """Serialize *instance*, serving a cached representation when available."""
    cache_key = get_cache_key(instance)
    hit = cache.get(cache_key)
    if hit:
        return hit
    # Build the full payload, embedding the related serializers.
    data = super().to_representation(instance)
    data["video"] = VideoSerializer(instance.video).data
    data["created_by"] = UserSerializer(instance.created_by).data
    data["items"] = ItemSerializer(instance.item_set, many=True).data
    cache.set(cache_key, data)  # store for the next request
    return data
class ItemSerializer(serializers.ModelSerializer):
3
Source : models.py
with GNU General Public License v3.0
from CMSgov
with GNU General Public License v3.0
from CMSgov
def get_logo(self):
    """Return this organization's logo, cached for 10 minutes (loaded on every page)."""
    from django.core.cache import cache

    cache_key = "org_logo_{}".format(self.id)
    logo = cache.get(cache_key)
    if not logo:
        root_task = self.get_organization_project().root_task
        profile_task = root_task.get_subtask("organization_profile")
        if profile_task:
            logo = profile_task.get_answers().as_dict().get("logo")
        else:
            logo = None
        cache.set(cache_key, logo, 60 * 10)  # 10 minutes
    return logo
@staticmethod
3
Source : mixins.py
with Apache License 2.0
from Code4PuertoRico
with Apache License 2.0
from Code4PuertoRico
def dispatch(self, request, *args, **kwargs):
    """Serve a cached response when present; otherwise render and schedule caching."""
    cache_key = get_cache_key(request)
    hit = cache.get(cache_key)
    if hit:
        return hit
    fresh = super().dispatch(request, *args, **kwargs)
    # Only successful pages are cached, after template rendering completes.
    if fresh.status_code == 200:
        fresh.add_post_render_callback(lambda r: cache_response(r, cache_key))
    return fresh
3
Source : utils.py
with MIT License
from dafi-um
with MIT License
from dafi-um
def get_domain() -> str:
    """Return the site's base URL (https scheme), memoized in the cache."""
    domain = cache.get('main_domain')
    if domain:
        return domain
    domain = 'https://' + get_current_site(None).domain
    cache.set('main_domain', domain)
    return domain
def get_url(*args, **kwargs) -> str:
3
Source : models.py
with MIT License
from dafi-um
with MIT License
from dafi-um
def get_grouped(cls):
    """Return subjects grouped by year as {year: [subject, ...]}, cached."""
    grouped = cache.get('grouped_subjects')
    if not grouped:
        grouped = {}
        for subject in cls.objects.all():
            grouped.setdefault(subject.year, []).append(subject)
        cache.set('grouped_subjects', grouped)
    return grouped
class Year:
3
Source : backend.py
with BSD 2-Clause "Simplified" License
from django-auth-ldap
with BSD 2-Clause "Simplified" License
from django-auth-ldap
def _load_cached_attr(self, attr_name):
    """Populate *attr_name* on self from the cache (no-op unless caching is on)."""
    if self.settings.CACHE_TIMEOUT <= 0:
        return
    cached = cache.get(self._cache_key(attr_name))
    setattr(self, attr_name, cached)
def _cache_attr(self, attr_name):
3
Source : tests.py
with BSD 2-Clause "Simplified" License
from django-auth-ldap
with BSD 2-Clause "Simplified" License
from django-auth-ldap
def test_dn_not_cached(self, mock):
    # With no caching configured, each authenticate() must search the
    # directory anew and nothing should land in the DN cache.
    # ``mock`` is injected by a decorator (not visible here) spying on the
    # LDAP search call.
    self._init_settings(
        USER_SEARCH=LDAPSearch(
            "ou=people,o=test", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
        )
    )
    for _ in range(2):
        user = authenticate(username="alice", password="password")
        self.assertIsNotNone(user)
    # Should have executed once per auth.
    self.assertEqual(mock.call_count, 2)
    # DN is not cached.
    self.assertIsNone(cache.get("django_auth_ldap.user_dn.alice"))
@spy_ldap("search_s")
3
Source : snippet.py
with Apache License 2.0
from dockerizeme
with Apache License 2.0
from dockerizeme
def process_view(self, request, view_func, view_args, view_kwargs):
    # Middleware hook: serve a cached page for plain GETs ("magicflag" in the
    # query string bypasses the cache), re-rendering the cached body as a
    # template so per-request context stays live.
    response = None
    if request.method == 'GET' and 'magicflag' not in request.GET:
        cache_key = urllib.quote(request.path)  # Python 2 API; urllib.parse.quote on Py3
        response = cache.get(cache_key, None)
    if response is None:
        response = view_func(request, *view_args, **view_kwargs)
        # NOTE(review): nothing in this hook ever cache.set()s the response;
        # presumably the entry is stored elsewhere — confirm before relying on it.
        if 'magicflag' not in request.GET and response['content-type'].startswith('text/html'):
            t = Template(response.content)
            response.content = t.render(RequestContext(request))
    return response
3
Source : snippet.py
with Apache License 2.0
from dockerizeme
with Apache License 2.0
from dockerizeme
def ratelimit(limit=10, length=86400):
    """Per-IP rate-limit decorator; *length* is the window in seconds (default one day)."""
    def decorator(func):
        def inner(request, *args, **kwargs):
            ip_hash = str(hash(request.META['REMOTE_ADDR']))
            seen = cache.get(ip_hash)
            if seen:
                seen = int(seen)
                if seen == limit:
                    return HttpResponseForbidden("Ooops too many requests today!")
                cache.set(ip_hash, seen + 1, length)
                return func(request, *args, **kwargs)
            # First request from this address within the window.
            cache.add(ip_hash, 1, length)
            return func(request, *args, **kwargs)
        return wraps(func, assigned=available_attrs(func))(inner)
    return decorator
3
Source : snippet.py
with Apache License 2.0
from dockerizeme
with Apache License 2.0
from dockerizeme
def get(name):
    '''Returns a counter value for counter name
    Results are cached after first request'''
    result = cache.get(CACHE_KEY + name)
    # BUG FIX: the original unconditionally reassigned ``result = None``
    # right after the lookup, discarding every cache hit and recomputing
    # the sum on each call — contradicting the docstring.
    if result is None:
        results = CounterShard.objects.filter(name=name).values()
        # An absent counter aggregates to None; normalize to 0.
        result = results.aggregate(count=Sum('count'))['count'] or 0
        cache.add(CACHE_KEY + name, result, CACHE_TIMEOUT)
    return result
def _update(name, delta):
3
Source : snippet.py
with Apache License 2.0
from dockerizeme
with Apache License 2.0
from dockerizeme
def load(cls):
    """Return the cached singleton row, creating and caching it on first access."""
    # BUG FIX: the original referenced ``self.__class__.__name__`` inside a
    # ``cls``-only signature, raising NameError on every cache miss; use
    # ``cls.__name__`` consistently.
    if cache.get(cls.__name__) is None:
        obj, created = cls.objects.get_or_create(pk=1)
        if not created:
            obj.set_cache()
    return cache.get(cls.__name__)
3
Source : models.py
with MIT License
from dtcooper
with MIT License
from dtcooper
def queue_autodj_request(self):
    """Try to add this asset's id to the AutoDJ request queue; True on success."""
    pending = cache.get(constants.CACHE_KEY_AUTODJ_REQUESTS, [])
    # Reject when the queue is at capacity or this asset is already queued.
    if len(pending) >= config.AUTODJ_REQUESTS_NUM or self.id in pending:
        logger.info(f"attempted to make autodj request {self}, but queue full or request exists")
        return False
    pending.append(self.id)
    cache.set(constants.CACHE_KEY_AUTODJ_REQUESTS, pending, timeout=REQUESTS_CACHE_TIMEOUT)
    logger.info(f"queue autodj request {self}")
    return True
@classmethod
3
Source : models.py
with MIT License
from dtcooper
with MIT License
from dtcooper
def task_log_line(self):
    # This is property cached for the lifetime of the object so it isn't read
    # twice with different values by admin.
    if self.status != self.Status.PROCESSING or not self.task_id:
        return None
    return cache.get(f"{constants.CACHE_KEY_ASSET_TASK_LOG_PREFIX}{self.task_id}")
def set_task_log_line(self, log_line):
3
Source : tasks.py
with MIT License
from dtcooper
with MIT License
from dtcooper
def install_youtube_dl():
    """Ensure youtube-dl is installed and was upgraded within the last ~23 hours."""
    fresh = cache.get(constants.CACHE_KEY_YTDL_UP2DATE)
    if fresh and shutil.which(YOUTUBE_DL_CMD):
        return
    logger.info("updating youtube-dl...")
    subprocess.run(
        ["pip", "install", "--no-cache-dir", "--upgrade", YOUTUBE_DL_PKG],
        check=True,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    # Remember success so calls in the next ~23h skip the pip upgrade.
    cache.set(constants.CACHE_KEY_YTDL_UP2DATE, True, timeout=60 * 60 * 23)
    logger.info("youtube-dl up to date!")
@djhuey.periodic_task(priority=2, validate_datetime=once_at_startup(local_daily_task(hour=4)))
3
Source : admin.py
with MIT License
from dtcooper
with MIT License
from dtcooper
def get_initial(self):
    """Merge any cached harbor-config overrides into the form's initial data."""
    initial = super().get_initial()
    overrides = cache.get(constants.CACHE_KEY_HARBOR_CONFIG_CONTEXT)
    # Only a dict-shaped cache entry is trusted as override data.
    if isinstance(overrides, dict):
        initial.update(overrides)
    return initial
def form_valid(self, form):
3
Source : views.py
with MIT License
from enjoy-binbin
with MIT License
from enjoy-binbin
def get_queryset(self):
    """Return this view's queryset, served from the cache when possible."""
    # (A celery smoke test, test_add.delay(5, 5), used to run here.)
    cached = cache.get(self.cache_key)
    if cached:  # a miss returns None
        return cached
    queryset = super().get_queryset()  # parent applies the model's Meta.ordering
    cache.set(self.cache_key, queryset)
    return queryset
# def get_queryset(self):
# queryset = super().get_queryset() # 调用父类的方法
# # qs会根据model里定义的 ordering排序
# return queryset
@property
3
Source : views.py
with MIT License
from enjoy-binbin
with MIT License
from enjoy-binbin
def get_queryset(self):
    """Articles in the URL's category (and its subcategories), cached per cache_key."""
    cached = cache.get(self.cache_key)
    if cached:
        return cached
    category = get_object_or_404(Category, slug=self.kwargs['slug'])
    self.object_name = category.name  # remembered for the template context
    names = [sub.name for sub in category.get_sub_categorys()]
    queryset = Article.objects.filter(category__name__in=names).exclude(status="hide")
    cache.set(self.cache_key, queryset)
    return queryset
@property
3
Source : views.py
with MIT License
from enjoy-binbin
with MIT License
from enjoy-binbin
def get_queryset(self):
    """Articles carrying the tag from the URL, cached per cache_key."""
    cached = cache.get(self.cache_key)
    if cached:
        return cached
    tag = get_object_or_404(Tag, id=self.kwargs['tag_id'])
    queryset = Article.objects.filter(tags=tag).exclude(status="hide")
    cache.set(self.cache_key, queryset)
    return queryset
@property
3
Source : views.py
with MIT License
from enjoy-binbin
with MIT License
from enjoy-binbin
def get_queryset(self):
    """Articles written by the author named in the URL, cached per cache_key."""
    cached = cache.get(self.cache_key)
    if cached:
        return cached
    queryset = Article.objects.filter(author__username=self.kwargs['author_name'])
    cache.set(self.cache_key, queryset)
    return queryset
@property
3
Source : feeds.py
with GNU Affero General Public License v3.0
from epilys
with GNU Affero General Public License v3.0
from epilys
def items(self):
    """Ten newest active stories, re-queried only when a newer story exists."""
    cached_latest = cache.get("latest_stories_latest")
    try:
        actual_latest = (
            Story.objects.exclude(active=False).latest("created").created
        )
    except Story.DoesNotExist:
        actual_latest = date.fromtimestamp(0)
    stories = cache.get("latest_stories")
    stale = cached_latest is not None and cached_latest != actual_latest
    if stories is None or stale:
        stories = Story.objects.exclude(active=False).order_by("-created")[:10]
        cache.set("latest_stories", stories)
        cache.set("latest_stories_latest", actual_latest)
    return stories
def item_title(self, item):
3
Source : feeds.py
with GNU Affero General Public License v3.0
from epilys
with GNU Affero General Public License v3.0
from epilys
def items(self):
    """Front-page feed items, refreshed only when a newer story appears."""
    cached_latest = cache.get(self.latest_key)
    try:
        actual_latest = self.user.frontpage()["stories"].latest("created").created
    except Story.DoesNotExist:
        actual_latest = date.fromtimestamp(0)
    stories = cache.get(self.cache_key)
    stale = cached_latest is not None and cached_latest != actual_latest
    if stories is None or stale:
        stories = Story.objects.exclude(active=False).order_by("-created")[:10]
        cache.set(self.cache_key, stories)
        cache.set(self.latest_key, actual_latest)
    return stories
def __call__(self, request, *args, **kwargs):
3
Source : models.py
with GNU Affero General Public License v3.0
from epilys
with GNU Affero General Public License v3.0
from epilys
def stories_count(self):
    """Number of stories on this object, cached for one day."""
    key = f"{self.pk}-stories-count"
    count = cache.get(key)
    if count is None:
        count = self.stories.count()
        cache.set(key, count, timeout=60 * 60 * 24)
    return count
class Meta:
3
Source : utils.py
with GNU Affero General Public License v3.0
from epilys
with GNU Affero General Public License v3.0
from epilys
def get_from_cache(key: str):
    """Template helper: return the cached value stored under *key*."""
    # Single lookup: the original called cache.get(key) twice, once for the
    # debug print and again for the return, so it hit the backend twice and
    # could even print a different value than it returned.
    value = cache.get(key)
    print(key, value)
    return value
@register.simple_tag(takes_context=False)
3
Source : middleware.py
with MIT License
from everhide
with MIT License
from everhide
def process_request(self, request):
    # Trace header present: short-circuit the request and reply with the data
    # cached under that trace id (404 status when nothing is stored).
    if Header.TRACE in request.META:
        trace_id = request.META.get(Header.TRACE, 0)
        cached_data = cache.get(trace_id, {})
        json_response = {
            'data': cached_data,
            'status': 200 if cached_data else 404,
            'json_dumps_params': {'default': serial}
        }
        return JsonResponse(**json_response)
    # Request header present: reset per-request state and remember the id.
    # NOTE(review): this state lives on the middleware instance, which Django
    # shares across requests — confirm single-threaded deployment assumptions.
    elif Header.REQUEST in request.META:
        self.manager.clear()
        self.data = {}
        self.request_id = request.META.get(Header.REQUEST, 0)
def process_template_response(self, request, response):
3
Source : sites.py
with GNU General Public License v3.0
from foonsun
with GNU General Public License v3.0
from foonsun
def login(self, request, extra_context=None):
    """Admin login that switches to a captcha form after 3 recorded attempts."""
    key = AdminSite.GenCaptchaKey(request)
    attempts = int(cache.get(key, 0))
    # Fewer than 3 recent attempts: plain form; otherwise captcha-protected.
    self.login_form = None if attempts < 3 else AdminAuthenticationForm
    if request.method == 'POST':
        cache.set(key, attempts + 1, 30)  # count this attempt, 30s window
    return super(AdminSite, self).login(request, extra_context)
def logged_clean_captcha_cache(sender, request, user, **kwargs):
3
Source : abstract_models.py
with GNU General Public License v3.0
from foonsun
with GNU General Public License v3.0
from foonsun
def get_absolute_url(self):
    """
    Our URL scheme means we have to look up the category's ancestors. As
    that is a bit more expensive, we cache the generated URL. That is
    safe even for a stale cache, as the default implementation of
    ProductCategoryView does the lookup via primary key anyway. But if
    you change that logic, you'll have to reconsider the caching
    approach.
    """
    cache_key = self.get_url_cache_key()
    cached_url = cache.get(cache_key)
    if cached_url:
        return cached_url
    url = reverse(
        'catalogue:category',
        kwargs={'category_slug': self.full_slug, 'pk': self.pk})
    cache.set(cache_key, url)
    return url
class Meta:
3
Source : django.py
with MIT License
from frontendr
with MIT License
from frontendr
def get_stored_response(self, request):
    """Return (response, meta) from the cache, or (None, None) on a miss."""
    entry = cache.get(
        get_cache_key(request), default=None, version=self.options.get("VERSION")
    )
    if entry is None:
        return None, None
    return entry["response"], entry["meta"]
def store_response(self, request, response, meta):
3
Source : models.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def _lookup_permission(self, app, action):
    """Look up a HuePermission via a cached {"app:action": perm} map."""
    # Cached instead of HuePermission.objects.get(app=app, action=action);
    # to revert with Django 1.6.
    perms = cache.get('perms')
    if not perms:
        perms = {'%s:%s' % (p.app, p.action): p for p in HuePermission.objects.all()}
        cache.set('perms', perms, 60 * 60)
    return perms.get('%s:%s' % (app, action))
def has_hue_permission(self, action=None, app=None, perm=None):
3
Source : tests.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_non_existent(self):
    "Non-existent keys aren't found in the dummy cache backend"
    missing = cache.get("does_not_exist")
    self.assertIsNone(missing)
    # A supplied default is returned instead of None.
    self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")
def test_get_many(self):
3
Source : tests.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_non_existent(self):
    # Missing cache keys come back as None, or as the supplied default.
    self.assertIsNone(cache.get("does_not_exist"))
    fallback = cache.get("does_not_exist", "bang!")
    self.assertEqual(fallback, "bang!")
3
Source : tests.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_long_timeout(self):
    """
    Follow memcached's convention where a timeout greater than 30 days is
    treated as an absolute expiration timestamp instead of a relative
    offset (#12399).
    """
    month_plus_second = 60 * 60 * 24 * 30 + 1  # 30 days + 1 second
    cache.set('key1', 'eggs', month_plus_second)
    self.assertEqual(cache.get('key1'), 'eggs')
    cache.add('key2', 'ham', month_plus_second)
    self.assertEqual(cache.get('key2'), 'ham')
    cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, month_plus_second)
    self.assertEqual(cache.get('key3'), 'sausage')
    self.assertEqual(cache.get('key4'), 'lobster bisque')
def test_forever_timeout(self):
3
Source : tests.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_get_or_set(self):
    # A cold key is set to the provided default and then readable.
    self.assertIsNone(cache.get('projector'))
    self.assertEqual(cache.get_or_set('projector', 42), 42)
    self.assertEqual(cache.get('projector'), 42)
    # A None default round-trips as None.
    self.assertEqual(cache.get_or_set('null', None), None)
3
Source : tests.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_get_does_not_ignore_non_enoent_errno_values(self):
    # An IOError other than "file not found" must propagate from the file
    # backend instead of being swallowed as a cache miss.
    with mock.patch.object(io, 'open', side_effect=IOError), \
            self.assertRaises(IOError):
        cache.get('foo')
@override_settings(CACHES={
3
Source : backend.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def _load_cached_attr(self, attr_name):
    """Set *attr_name* on self from the cache (no-op unless CACHE_GROUPS is on)."""
    if not self.settings.CACHE_GROUPS:
        return
    setattr(self, attr_name, cache.get(self._cache_key(attr_name)))
def _cache_attr(self, attr_name):
3
Source : backend.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def cache_get_or_set(cache, key, default, timeout=None):
    """
    Backport of Django 1.9's cache.get_or_set.
    """
    cached = cache.get(key)
    if cached is not None:
        return cached
    # Miss: resolve the default (which may be a zero-argument callable),
    # store it under *key*, and hand it back.
    if callable(default):
        cached = default()
    else:
        cached = default
    cache.set(key, cached, timeout)
    return cached
3
Source : privilege_checker.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def get_checker(user, checker=None):
    """Return the supplied checker, a cached one, or a freshly built PrivilegeChecker."""
    cache_key = SENTRY_PRIVILEGE_CACHE_KEY % {'username': user.username}
    resolved = checker if checker else cache.get(cache_key)
    if not resolved:
        resolved = PrivilegeChecker(user=user)
        cache.set(cache_key, resolved, PRIVILEGE_CHECKER_CACHING.get())
    return resolved
class PrivilegeChecker(object):
3
Source : optimizer_client.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def _get_tenant_id(api):
    """Resolve the Optimizer tenant id from config, cache, or the API (cached 30 days)."""
    tenant_id = OPTIMIZER.TENANT_ID.get() or cache.get(OPTIMIZER_TENANT_ID_CACHE_KEY)
    if tenant_id:
        return tenant_id
    tenant = api.get_tenant(cluster_id=OPTIMIZER.CLUSTER_ID.get())
    if not tenant.get('tenant'):
        raise PopupException(_('Could not get tenant id from cluster id %s: %s') % (OPTIMIZER.CLUSTER_ID.get(), tenant))
    tenant_id = tenant['tenant']
    cache.set(OPTIMIZER_TENANT_ID_CACHE_KEY, tenant_id, 60 * 60 * 24 * 30)
    return tenant_id
3
Source : api.py
with GNU General Public License v2.0
from getway
with GNU General Public License v2.0
from getway
def get(self, request):
    """Look up a cached auth token; optionally return only its user field."""
    token = request.query_params.get('token')
    user_only = request.query_params.get('user-only', None)
    value = cache.get(token, None)
    if not value:
        return Response('', status=404)
    if user_only:
        return Response({'user': value['user']})
    return Response(value)
def get_permissions(self):
3
Source : authentication.py
with GNU General Public License v2.0
from getway
with GNU General Public License v2.0
from getway
def authenticate_credentials(token):
    """Resolve *token* to a user via the cache; raise AuthenticationFailed otherwise."""
    user_id = cache.get(token)
    user = get_object_or_none(User, id=user_id)
    if not user:
        raise exceptions.AuthenticationFailed(_('Invalid token or cache refreshed.'))
    # Successful lookups extend the token's lifetime.
    refresh_token(token, user)
    return user, None
class PrivateTokenAuthentication(authentication.TokenAuthentication):
See More Examples