Dataset columns:
- repository_name: string (length 5 to 67)
- func_path_in_repository: string (length 4 to 234)
- func_name: string (length 0 to 314)
- whole_func_string: string (length 52 to 3.87M)
- language: 6 classes
- func_code_string: string (length 52 to 3.87M)
- func_documentation_string: string (length 1 to 47.2k)
- func_code_url: string (length 85 to 339)
lwcolton/falcon-cors
src/falcon_cors/__init__.py
CORS._process_credentials
def _process_credentials(self, req, resp, origin):
    """Adds the Access-Control-Allow-Credentials header to the response
    if the CORS settings indicate it should be set.
    """
    if self._cors_config['allow_credentials_all_origins']:
        self._set_allow_credentials(resp)
        return True

    if origin in self._cors_config['allow_credentials_origins_list']:
        self._set_allow_credentials(resp)
        return True

    credentials_regex = self._cors_config['allow_credentials_origins_regex']
    if credentials_regex:
        if credentials_regex.match(origin):
            self._set_allow_credentials(resp)
            return True

    return False
python
Adds the Access-Control-Allow-Credentials header to the response if the CORS settings indicate it should be set.
https://github.com/lwcolton/falcon-cors/blob/9e1243829078e4c6f8fb8bb895b5cad62bce9d6b/src/falcon_cors/__init__.py#L396-L414
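For context, here is a hedged sketch of how this middleware might be wired into a Falcon app. The constructor keyword names are assumed to mirror the _cors_config keys used above (they are not confirmed by this record), and the origin URL is a placeholder.

# Hedged sketch: wiring falcon-cors into a Falcon app. The keyword arguments
# below are assumptions based on the config keys above; check the falcon-cors
# README before relying on them.
import falcon
from falcon_cors import CORS

cors = CORS(
    allow_origins_list=['https://example.com'],
    allow_credentials_origins_list=['https://example.com'],
)

# falcon.App in Falcon 3.x; earlier releases used falcon.API
app = falcon.App(middleware=[cors.middleware])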
azavea/django-amazon-ses
django_amazon_ses.py
EmailBackend.send_messages
def send_messages(self, email_messages):
    """Sends one or more EmailMessage objects and returns the number of
    email messages sent.

    Args:
        email_messages: A list of Django EmailMessage objects.
    Returns:
        An integer count of the messages sent.
    Raises:
        ClientError: An interaction with the Amazon SES HTTP API failed.
    """
    if not email_messages:
        return

    sent_message_count = 0

    for email_message in email_messages:
        if self._send(email_message):
            sent_message_count += 1
    return sent_message_count
python
Sends one or more EmailMessage objects and returns the number of email messages sent. Args: email_messages: A list of Django EmailMessage objects. Returns: An integer count of the messages sent. Raises: ClientError: An interaction with the Amazon SES HTTP API failed.
https://github.com/azavea/django-amazon-ses/blob/668c2e240ee643d02294d28966a9d44cf30dfc7f/django_amazon_ses.py#L58-L78
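As a usage sketch, Django reaches send_messages() through its email framework once the backend is configured. The dotted EMAIL_BACKEND path shown in the comment is an assumption; consult the django-amazon-ses README for the exact value.

# Hedged sketch: Django calls EmailBackend.send_messages() via EMAIL_BACKEND.
# In settings.py (path is an assumption):
#   EMAIL_BACKEND = 'django_amazon_ses.EmailBackend'
from django.core.mail import EmailMessage, get_connection

connection = get_connection()  # instance of the configured EmailBackend
messages = [
    EmailMessage('Subject', 'Body', 'from@example.com', ['to@example.com']),
]
sent = connection.send_messages(messages)  # returns the integer count above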
azavea/django-amazon-ses
django_amazon_ses.py
EmailBackend._send
def _send(self, email_message):
    """Sends an individual message via the Amazon SES HTTP API.

    Args:
        email_message: A single Django EmailMessage object.
    Returns:
        True if the EmailMessage was sent successfully, otherwise False.
    Raises:
        ClientError: An interaction with the Amazon SES HTTP API failed.
    """
    pre_send.send(self.__class__, message=email_message)

    if not email_message.recipients():
        return False

    from_email = sanitize_address(email_message.from_email, email_message.encoding)
    recipients = [sanitize_address(addr, email_message.encoding)
                  for addr in email_message.recipients()]
    message = email_message.message().as_bytes(linesep='\r\n')

    try:
        result = self.conn.send_raw_email(
            Source=from_email,
            Destinations=recipients,
            RawMessage={
                'Data': message
            }
        )
        message_id = result['MessageId']
        post_send.send(
            self.__class__,
            message=email_message,
            message_id=message_id
        )
    except ClientError:
        if not self.fail_silently:
            raise
        return False

    return True
python
Sends an individual message via the Amazon SES HTTP API. Args: email_message: A single Django EmailMessage object. Returns: True if the EmailMessage was sent successfully, otherwise False. Raises: ClientError: An interaction with the Amazon SES HTTP API failed.
https://github.com/azavea/django-amazon-ses/blob/668c2e240ee643d02294d28966a9d44cf30dfc7f/django_amazon_ses.py#L80-L120
datadesk/django-bakery
bakery/static_views.py
serve
def serve(request, path, document_root=None, show_indexes=False, default=''):
    """
    Serve static files below a given point in the directory structure.

    To use, put a URL pattern such as::

        (r'^(?P<path>.*)$', 'django.views.static.serve',
            {'document_root' : '/path/to/my/files/'})

    in your URLconf. You must provide the ``document_root`` param. You may
    also set ``show_indexes`` to ``True`` if you'd like to serve a basic index
    of the directory. This index view will use the template hardcoded below,
    but if you'd like to override it, you can create a template called
    ``static/directory_index.html``.

    Modified by ticket #1013 to serve index.html files in the same manner
    as Apache and other web servers.

    https://code.djangoproject.com/ticket/1013
    """
    # Clean up given path to only allow serving files below document_root.
    path = posixpath.normpath(unquote(path))
    path = path.lstrip('/')
    newpath = ''
    for part in path.split('/'):
        if not part:
            # Strip empty path components.
            continue
        drive, part = os.path.splitdrive(part)
        head, part = os.path.split(part)
        if part in (os.curdir, os.pardir):
            # Strip '.' and '..' in path.
            continue
        newpath = os.path.join(newpath, part).replace('\\', '/')
    if newpath and path != newpath:
        return HttpResponseRedirect(newpath)
    fullpath = os.path.join(document_root, newpath)
    if os.path.isdir(fullpath) and default:
        defaultpath = os.path.join(fullpath, default)
        if os.path.exists(defaultpath):
            fullpath = defaultpath
    if os.path.isdir(fullpath):
        if show_indexes:
            return directory_index(newpath, fullpath)
        raise Http404("Directory indexes are not allowed here.")
    if not os.path.exists(fullpath):
        raise Http404('"%s" does not exist' % fullpath)
    # Respect the If-Modified-Since header.
    statobj = os.stat(fullpath)
    mimetype = mimetypes.guess_type(fullpath)[0] or 'application/octet-stream'
    if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),
                              statobj[stat.ST_MTIME], statobj[stat.ST_SIZE]):
        if django.VERSION > (1, 6):
            return HttpResponseNotModified(content_type=mimetype)
        else:
            return HttpResponseNotModified(mimetype=mimetype)
    contents = open(fullpath, 'rb').read()
    if django.VERSION > (1, 6):
        response = HttpResponse(contents, content_type=mimetype)
    else:
        response = HttpResponse(contents, mimetype=mimetype)
    response["Last-Modified"] = http_date(statobj[stat.ST_MTIME])
    response["Content-Length"] = len(contents)
    return response
python
Serve static files below a given point in the directory structure. To use, put a URL pattern such as:: (r'^(?P<path>.*)$', 'django.views.static.serve', {'document_root' : '/path/to/my/files/'}) in your URLconf. You must provide the ``document_root`` param. You may also set ``show_indexes`` to ``True`` if you'd like to serve a basic index of the directory. This index view will use the template hardcoded below, but if you'd like to override it, you can create a template called ``static/directory_index.html``. Modified by ticket #1013 to serve index.html files in the same manner as Apache and other web servers. https://code.djangoproject.com/ticket/1013
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/static_views.py#L19-L83
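A minimal standalone sketch of the path-cleaning loop used by serve() above; the helper name clean_path is illustrative only and not part of django-bakery.

# Sketch of the sanitizing loop above, isolated as a pure function.
import os
import posixpath
from urllib.parse import unquote

def clean_path(path):
    """Normalize a requested URL path so it cannot escape the document root."""
    path = posixpath.normpath(unquote(path)).lstrip('/')
    newpath = ''
    for part in path.split('/'):
        if not part:
            continue  # skip empty components
        drive, part = os.path.splitdrive(part)
        head, part = os.path.split(part)
        if part in (os.curdir, os.pardir):
            continue  # drop '.' and '..'
        newpath = os.path.join(newpath, part).replace('\\', '/')
    return newpath

print(clean_path('../etc/passwd'))  # -> 'etc/passwd'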
datadesk/django-bakery
bakery/static_views.py
was_modified_since
def was_modified_since(header=None, mtime=0, size=0):
    """
    Was something modified since the user last downloaded it?

    header
      This is the value of the If-Modified-Since header.  If this is None,
      I'll just return True.

    mtime
      This is the modification time of the item we're talking about.

    size
      This is the size of the item we're talking about.
    """
    try:
        if header is None:
            raise ValueError
        matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", header,
                           re.IGNORECASE)
        header_mtime = parse_http_date(matches.group(1))
        header_len = matches.group(3)
        if header_len and int(header_len) != size:
            raise ValueError
        if int(mtime) > header_mtime:
            raise ValueError
    except (AttributeError, ValueError, OverflowError):
        return True
    return False
python
Was something modified since the user last downloaded it? header This is the value of the If-Modified-Since header. If this is None, I'll just return True. mtime This is the modification time of the item we're talking about. size This is the size of the item we're talking about.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/static_views.py#L135-L159
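For illustration, a runnable sketch of the header parsing done above, using the standard library's email.utils in place of Django's parse_http_date (a substitution, not the function's actual dependency).

# Parse an If-Modified-Since value with the optional "; length=" suffix.
import re
from email.utils import parsedate_to_datetime

header = 'Sat, 01 Jan 2022 00:00:00 GMT; length=1024'
matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", header, re.IGNORECASE)
header_mtime = parsedate_to_datetime(matches.group(1)).timestamp()
header_len = matches.group(3)
print(header_mtime, header_len)  # 1640995200.0 1024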
datadesk/django-bakery
bakery/views/detail.py
BuildableDetailView.get_url
def get_url(self, obj):
    """
    The URL at which the detail page should appear.
    """
    if not hasattr(obj, 'get_absolute_url') or not obj.get_absolute_url():
        raise ImproperlyConfigured("No URL configured. You must either \
set a ``get_absolute_url`` method on the %s model or override the %s view's \
``get_url`` method" % (obj.__class__.__name__, self.__class__.__name__))
    return obj.get_absolute_url()
python
The URL at which the detail page should appear.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/detail.py#L32-L40
datadesk/django-bakery
bakery/views/detail.py
BuildableDetailView.unbuild_object
def unbuild_object(self, obj):
    """
    Deletes the directory at self.get_build_path.
    """
    logger.debug("Unbuilding %s" % obj)
    target_path = os.path.split(self.get_build_path(obj))[0]
    if self.fs.exists(target_path):
        logger.debug("Removing {}".format(target_path))
        self.fs.removetree(target_path)
python
Deletes the directory at self.get_build_path.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/detail.py#L74-L82
datadesk/django-bakery
bakery/tasks.py
unpublish_object
def unpublish_object(content_type_pk, obj_pk):
    """
    Unbuild all views related to an object and then sync to S3.

    Accepts primary keys to retrieve a model object that
    inherits bakery's BuildableModel class.
    """
    ct = ContentType.objects.get_for_id(content_type_pk)
    obj = ct.get_object_for_this_type(pk=obj_pk)
    try:
        # Unbuild the object
        logger.info("unpublish_object task has received %s" % obj)
        obj.unbuild()
        # Run the `publish` management command unless the
        # ALLOW_BAKERY_AUTO_PUBLISHING variable is explicitly set to False.
        if not getattr(settings, 'ALLOW_BAKERY_AUTO_PUBLISHING', True):
            logger.info("Not running publish command because \
ALLOW_BAKERY_AUTO_PUBLISHING is False")
        else:
            management.call_command("publish")
    except Exception:
        # Log the error if this crashes
        logger.error("Task Error: unpublish_object", exc_info=True)
python
Unbuild all views related to an object and then sync to S3. Accepts primary keys to retrieve a model object that inherits bakery's BuildableModel class.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/tasks.py#L39-L61
datadesk/django-bakery
bakery/views/base.py
BuildableMixin.prep_directory
def prep_directory(self, target_dir):
    """
    Prepares a new directory to store the file at the provided path, if needed.
    """
    dirname = path.dirname(target_dir)
    if dirname:
        dirname = path.join(settings.BUILD_DIR, dirname)
        if not self.fs.exists(dirname):
            logger.debug("Creating directory at {}{}".format(self.fs_name, dirname))
            self.fs.makedirs(dirname)
python
Prepares a new directory to store the file at the provided path, if needed.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/base.py#L55-L64
datadesk/django-bakery
bakery/views/base.py
BuildableMixin.write_file
def write_file(self, target_path, html):
    """
    Writes out the provided HTML to the provided path.
    """
    logger.debug("Building to {}{}".format(self.fs_name, target_path))
    with self.fs.open(smart_text(target_path), 'wb') as outfile:
        outfile.write(six.binary_type(html))
        outfile.close()
python
Writes out the provided HTML to the provided path.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/base.py#L72-L79
datadesk/django-bakery
bakery/views/base.py
BuildableMixin.is_gzippable
def is_gzippable(self, path):
    """
    Returns a boolean indicating if the provided file path is a candidate
    for gzipping.
    """
    # First check if gzipping is allowed by the global setting
    if not getattr(settings, 'BAKERY_GZIP', False):
        return False
    # Then check if the content type of this particular file is gzippable
    whitelist = getattr(
        settings,
        'GZIP_CONTENT_TYPES',
        DEFAULT_GZIP_CONTENT_TYPES
    )
    return mimetypes.guess_type(path)[0] in whitelist
python
Returns a boolean indicating if the provided file path is a candidate for gzipping.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/base.py#L81-L95
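A standalone sketch of the content-type check above. The whitelist below is a trimmed stand-in, not django-bakery's actual DEFAULT_GZIP_CONTENT_TYPES.

# Guess the mimetype and test it against a gzip whitelist.
import mimetypes

GZIP_CONTENT_TYPES = ('text/html', 'text/css', 'application/javascript')

def is_gzippable(path):
    return mimetypes.guess_type(path)[0] in GZIP_CONTENT_TYPES

print(is_gzippable('app.css'))   # True
print(is_gzippable('logo.png'))  # False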
datadesk/django-bakery
bakery/views/base.py
BuildableMixin.gzip_file
def gzip_file(self, target_path, html):
    """
    Zips up the provided HTML as a companion for the provided path.

    Intended to take advantage of the peculiarities of
    Amazon S3's GZIP service.

    mtime, an option that writes a timestamp to the output file,
    is set to 0 to avoid having s3cmd do unnecessary uploads because
    of differences in the timestamp.
    """
    logger.debug("Gzipping to {}{}".format(self.fs_name, target_path))

    # Write GZIP data to an in-memory buffer
    data_buffer = six.BytesIO()
    kwargs = dict(
        filename=path.basename(target_path),
        mode='wb',
        fileobj=data_buffer
    )
    if float(sys.version[:3]) >= 2.7:
        kwargs['mtime'] = 0
    with gzip.GzipFile(**kwargs) as f:
        f.write(six.binary_type(html))

    # Write that buffer out to the filesystem
    with self.fs.open(smart_text(target_path), 'wb') as outfile:
        outfile.write(data_buffer.getvalue())
        outfile.close()
python
Zips up the provided HTML as a companion for the provided path. Intended to take advantage of the peculiarities of Amazon S3's GZIP service. mtime, an option that writes a timestamp to the output file is set to 0, to avoid having s3cmd do unnecessary uploads because of differences in the timestamp
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/base.py#L97-L125
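A minimal sketch of the mtime=0 trick described above: with the timestamp zeroed, the same input always gzips to the same bytes, so sync tools see no spurious change.

# Deterministic gzip output using an in-memory buffer.
import gzip
import io

def gzip_bytes(data: bytes) -> bytes:
    buf = io.BytesIO()
    with gzip.GzipFile(mode='wb', fileobj=buf, mtime=0) as f:
        f.write(data)
    return buf.getvalue()

assert gzip_bytes(b'<html></html>') == gzip_bytes(b'<html></html>')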
datadesk/django-bakery
bakery/views/base.py
BuildableRedirectView.get_redirect_url
def get_redirect_url(self, *args, **kwargs):
    """
    Return the URL redirect to. Keyword arguments from the
    URL pattern match generating the redirect request
    are provided as kwargs to this method.
    """
    if self.url:
        url = self.url % kwargs
    elif self.pattern_name:
        try:
            url = reverse(self.pattern_name, args=args, kwargs=kwargs)
        except NoReverseMatch:
            return None
    else:
        return None
    return url
python
Return the URL redirect to. Keyword arguments from the URL pattern match generating the redirect request are provided as kwargs to this method.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/base.py#L208-L223
datadesk/django-bakery
bakery/models.py
BuildableModel.build
def build(self):
    """
    Iterates through the views pointed to by self.detail_views, runs
    build_object with `self`, and calls _build_extra()
    and _build_related().
    """
    for detail_view in self.detail_views:
        view = self._get_view(detail_view)
        view().build_object(self)
    self._build_extra()
    self._build_related()
python
Iterates through the views pointed to by self.detail_views, runs build_object with `self`, and calls _build_extra() and _build_related().
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/models.py#L51-L61
datadesk/django-bakery
bakery/models.py
BuildableModel.unbuild
def unbuild(self):
    """
    Iterates through the views pointed to by self.detail_views, runs
    unbuild_object with `self`, and calls _build_extra()
    and _build_related().
    """
    for detail_view in self.detail_views:
        view = self._get_view(detail_view)
        view().unbuild_object(self)
    self._unbuild_extra()
    # _build_related again to kill the object from RSS etc.
    self._build_related()
python
Iterates through the views pointed to by self.detail_views, runs unbuild_object with `self`, and calls _build_extra() and _build_related().
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/models.py#L63-L74
datadesk/django-bakery
bakery/models.py
AutoPublishingBuildableModel.save
def save(self, *args, **kwargs):
    """
    A custom save that publishes or unpublishes the object where
    appropriate.

    Save with keyword argument obj.save(publish=False) to skip the process.
    """
    from bakery import tasks
    from django.contrib.contenttypes.models import ContentType
    # if obj.save(publish=False) has been passed, we skip everything.
    if not kwargs.pop('publish', True):
        super(AutoPublishingBuildableModel, self).save(*args, **kwargs)
    # Otherwise, for the standard obj.save(), here we go...
    else:
        # First figure out if the record is an addition, or an edit of
        # a preexisting record.
        try:
            preexisting = self.__class__.objects.get(pk=self.pk)
        except self.__class__.DoesNotExist:
            preexisting = None
        # If this is an addition...
        if not preexisting:
            # We will publish if that's the boolean
            if self.get_publication_status():
                action = 'publish'
            # Otherwise we will do nothing
            else:
                action = None
        # If this is an edit...
        else:
            # If it's being unpublished...
            if not self.get_publication_status() and \
                    preexisting.get_publication_status():
                action = 'unpublish'
            # If it's being published...
            elif self.get_publication_status():
                action = 'publish'
            # If it's remaining unpublished...
            else:
                action = None
        # Now, no matter what, save it normally inside of a dedicated
        # database transaction so that we are sure that the save will
        # be complete before we trigger any task
        with transaction.atomic():
            super(AutoPublishingBuildableModel, self).save(*args, **kwargs)
        # Finally, depending on the action, fire off a task
        ct = ContentType.objects.get_for_model(self.__class__)
        if action == 'publish':
            tasks.publish_object.delay(ct.pk, self.pk)
        elif action == 'unpublish':
            tasks.unpublish_object.delay(ct.pk, self.pk)
python
A custom save that publishes or unpublishes the object where appropriate. Save with keyword argument obj.save(publish=False) to skip the process.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/models.py#L117-L167
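The publish=False escape hatch in the docstring looks like the following in use. Post and its is_published field are hypothetical illustrations, not part of django-bakery itself.

# Hypothetical model instance that subclasses AutoPublishingBuildableModel.
obj = Post.objects.create(title="Hello", is_published=False)

obj.is_published = True
obj.save()                # saves, then queues a publish task

obj.save(publish=False)   # saves only; no task is queued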
datadesk/django-bakery
bakery/models.py
AutoPublishingBuildableModel.delete
def delete(self, *args, **kwargs):
    """
    Triggers a task that will unpublish the object after it is deleted.

    Call with keyword argument obj.delete(unpublish=False) to skip it.
    """
    from bakery import tasks
    from django.contrib.contenttypes.models import ContentType
    # if obj.delete(unpublish=False) has been passed, we skip the task.
    unpublish = kwargs.pop('unpublish', True)
    # Delete it from the database
    super(AutoPublishingBuildableModel, self).delete(*args, **kwargs)
    if unpublish:
        ct = ContentType.objects.get_for_model(self.__class__)
        tasks.unpublish_object.delay(ct.pk, self.pk)
python
Triggers a task that will unpublish the object after it is deleted. Save with keyword argument obj.delete(unpublish=False) to skip it.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/models.py#L169-L183
datadesk/django-bakery
bakery/management/commands/build.py
Command.handle
def handle(self, *args, **options):
    """
    Making it happen.
    """
    logger.info("Build started")

    # Set options
    self.set_options(*args, **options)

    # Get the build directory ready
    if not options.get("keep_build_dir"):
        self.init_build_dir()

    # Build up static files
    if not options.get("skip_static"):
        self.build_static()

    # Build the media directory
    if not options.get("skip_media"):
        self.build_media()

    # Build views
    self.build_views()

    # Close out
    logger.info("Build finished")
python
Making it happen.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L90-L115
datadesk/django-bakery
bakery/management/commands/build.py
Command.set_options
def set_options(self, *args, **options):
    """
    Configure a few global options before things get going.
    """
    self.verbosity = int(options.get('verbosity', 1))

    # Figure out what build directory to use
    if options.get("build_dir"):
        self.build_dir = options.get("build_dir")
        settings.BUILD_DIR = self.build_dir
    else:
        if not hasattr(settings, 'BUILD_DIR'):
            raise CommandError(self.build_unconfig_msg)
        self.build_dir = settings.BUILD_DIR

    # Get the datatypes right so fs will be happy
    self.build_dir = smart_text(self.build_dir)
    self.static_root = smart_text(settings.STATIC_ROOT)
    self.media_root = smart_text(settings.MEDIA_ROOT)

    # Connect the BUILD_DIR with our filesystem backend
    self.app = apps.get_app_config("bakery")
    self.fs = self.app.filesystem
    self.fs_name = self.app.filesystem_name

    # If the build dir doesn't exist make it
    if not self.fs.exists(self.build_dir):
        self.fs.makedirs(self.build_dir)

    # Figure out what views we'll be using
    if options.get('view_list'):
        self.view_list = options['view_list']
    else:
        if not hasattr(settings, 'BAKERY_VIEWS'):
            raise CommandError(self.views_unconfig_msg)
        self.view_list = settings.BAKERY_VIEWS

    # Are we pooling?
    self.pooling = options.get('pooling')
python
Configure a few global options before things get going.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L117-L155
datadesk/django-bakery
bakery/management/commands/build.py
Command.init_build_dir
def init_build_dir(self):
    """
    Clear out the build directory and create a new one.
    """
    # Destroy the build directory, if it exists
    logger.debug("Initializing %s" % self.build_dir)
    if self.verbosity > 1:
        self.stdout.write("Initializing build directory")
    if self.fs.exists(self.build_dir):
        self.fs.removetree(self.build_dir)
    # Then recreate it from scratch
    self.fs.makedirs(self.build_dir)
python
Clear out the build directory and create a new one.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L157-L168
datadesk/django-bakery
bakery/management/commands/build.py
Command.build_static
def build_static(self, *args, **options):
    """
    Builds the static files directory as well as robots.txt and favicon.ico
    """
    logger.debug("Building static directory")
    if self.verbosity > 1:
        self.stdout.write("Building static directory")

    management.call_command(
        "collectstatic",
        interactive=False,
        verbosity=0
    )

    # Set the target directory inside the filesystem.
    target_dir = path.join(
        self.build_dir,
        settings.STATIC_URL.lstrip('/')
    )
    target_dir = smart_text(target_dir)

    if os.path.exists(self.static_root) and settings.STATIC_URL:
        if getattr(settings, 'BAKERY_GZIP', False):
            self.copytree_and_gzip(self.static_root, target_dir)
        # if gzip isn't enabled, just copy the tree straight over
        else:
            logger.debug("Copying {}{} to {}{}".format(
                "osfs://", self.static_root, self.fs_name, target_dir))
            copy.copy_dir("osfs:///", self.static_root, self.fs, target_dir)

    # If they exist in the static directory, copy the robots.txt
    # and favicon.ico files down to the root so they will work
    # on the live website.
    robots_src = path.join(target_dir, 'robots.txt')
    if self.fs.exists(robots_src):
        robots_target = path.join(self.build_dir, 'robots.txt')
        logger.debug("Copying {}{} to {}{}".format(
            self.fs_name, robots_src, self.fs_name, robots_target))
        self.fs.copy(robots_src, robots_target)

    favicon_src = path.join(target_dir, 'favicon.ico')
    if self.fs.exists(favicon_src):
        favicon_target = path.join(self.build_dir, 'favicon.ico')
        logger.debug("Copying {}{} to {}{}".format(
            self.fs_name, favicon_src, self.fs_name, favicon_target))
        self.fs.copy(favicon_src, favicon_target)
python
Builds the static files directory as well as robots.txt and favicon.ico
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L170-L211
datadesk/django-bakery
bakery/management/commands/build.py
Command.build_media
def build_media(self):
    """
    Build the media files.
    """
    logger.debug("Building media directory")
    if self.verbosity > 1:
        self.stdout.write("Building media directory")
    if os.path.exists(self.media_root) and settings.MEDIA_URL:
        target_dir = path.join(
            self.fs_name,
            self.build_dir,
            settings.MEDIA_URL.lstrip('/')
        )
        logger.debug("Copying {}{} to {}{}".format(
            "osfs://", self.media_root, self.fs_name, target_dir))
        copy.copy_dir(
            "osfs:///",
            smart_text(self.media_root),
            self.fs,
            smart_text(target_dir)
        )
python
Build the media files.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L213-L223
datadesk/django-bakery
bakery/management/commands/build.py
Command.build_views
def build_views(self):
    """
    Bake out specified buildable views.
    """
    # Then loop through and run them all
    for view_str in self.view_list:
        logger.debug("Building %s" % view_str)
        if self.verbosity > 1:
            self.stdout.write("Building %s" % view_str)
        view = get_callable(view_str)
        self.get_view_instance(view).build_method()
python
Bake out specified buildable views.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L231-L241
datadesk/django-bakery
bakery/management/commands/build.py
Command.copytree_and_gzip
def copytree_and_gzip(self, source_dir, target_dir):
    """
    Copies the provided source directory to the provided target directory.

    Gzips JavaScript, CSS and HTML and other files along the way.
    """
    # Figure out what we're building...
    build_list = []
    # Walk through the source directory...
    for (dirpath, dirnames, filenames) in os.walk(source_dir):
        for f in filenames:
            # Figure out what is going where
            source_path = os.path.join(dirpath, f)
            rel_path = os.path.relpath(dirpath, source_dir)
            target_path = os.path.join(target_dir, rel_path, f)
            # Add it to our list to build
            build_list.append((source_path, target_path))

    logger.debug("Gzipping {} files".format(len(build_list)))

    # Build em all
    if not getattr(self, 'pooling', False):
        [self.copyfile_and_gzip(*u) for u in build_list]
    else:
        cpu_count = multiprocessing.cpu_count()
        logger.debug("Pooling build on {} CPUs".format(cpu_count))
        pool = ThreadPool(processes=cpu_count)
        pool.map(self.pooled_copyfile_and_gzip, build_list)
python
Copies the provided source directory to the provided target directory. Gzips JavaScript, CSS and HTML and other files along the way.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L243-L270
datadesk/django-bakery
bakery/management/commands/build.py
Command.copyfile_and_gzip
def copyfile_and_gzip(self, source_path, target_path):
    """
    Copies the provided file to the provided target directory.

    Gzips JavaScript, CSS and HTML and other files along the way.
    """
    # And then where we want to copy it to.
    target_dir = path.dirname(target_path)
    if not self.fs.exists(target_dir):
        try:
            self.fs.makedirs(target_dir)
        except OSError:
            pass

    # determine the mimetype of the file
    guess = mimetypes.guess_type(source_path)
    content_type = guess[0]
    encoding = guess[1]

    # If it isn't a file we want to gzip...
    if content_type not in self.gzip_file_match:
        # just copy it to the target.
        logger.debug("Copying {}{} to {}{} because its filetype isn't on the whitelist".format(
            "osfs://", source_path, self.fs_name, target_path
        ))
        copy.copy_file("osfs:///", smart_text(source_path), self.fs, smart_text(target_path))

    # if the file is already gzipped
    elif encoding == 'gzip':
        logger.debug("Copying {}{} to {}{} because it's already gzipped".format(
            "osfs://", source_path, self.fs_name, target_path
        ))
        copy.copy_file("osfs:///", smart_text(source_path), self.fs, smart_text(target_path))

    # If it is one we want to gzip...
    else:
        # ... let the world know ...
        logger.debug("Gzipping {}{} to {}{}".format(
            "osfs://", source_path, self.fs_name, target_path
        ))
        # Open up the source file from the OS
        with open(source_path, 'rb') as source_file:
            # Write GZIP data to an in-memory buffer
            data_buffer = six.BytesIO()
            kwargs = dict(
                filename=path.basename(target_path),
                mode='wb',
                fileobj=data_buffer
            )
            if float(sys.version[:3]) >= 2.7:
                kwargs['mtime'] = 0
            with gzip.GzipFile(**kwargs) as f:
                f.write(six.binary_type(source_file.read()))

            # Write that buffer out to the filesystem
            with self.fs.open(smart_text(target_path), 'wb') as outfile:
                outfile.write(data_buffer.getvalue())
                outfile.close()
python
Copies the provided file to the provided target directory. Gzips JavaScript, CSS and HTML and other files along the way.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L280-L346
datadesk/django-bakery
bakery/management/commands/publish.py
Command.handle
def handle(self, *args, **options):
    """
    Sync files in the build directory to a specified S3 bucket
    """
    # Counts and such we can use to keep tabs on this as they progress
    self.uploaded_files = 0
    self.uploaded_file_list = []
    self.deleted_files = 0
    self.deleted_file_list = []
    self.start_time = time.time()

    # Configure all the options we're going to use
    self.set_options(options)

    # Initialize the boto connection
    logger.debug("Connecting to s3")
    if self.verbosity > 2:
        self.stdout.write("Connecting to s3")
    self.s3_client, self.s3_resource = get_s3_client()

    # Grab our bucket
    logger.debug("Retrieving bucket {}".format(self.aws_bucket_name))
    if self.verbosity > 2:
        self.stdout.write("Retrieving bucket {}".format(self.aws_bucket_name))
    self.bucket = self.s3_resource.Bucket(self.aws_bucket_name)

    # Get a list of all keys in our s3 bucket ...
    # ... unless this is a case where we're blindly pushing everything
    if self.force_publish and self.no_delete:
        self.blind_upload = True
        logger.debug("Skipping object retrieval. We won't need it because we're blindly uploading everything.")
        self.s3_obj_dict = {}
    else:
        self.blind_upload = False
        logger.debug("Retrieving objects now published in bucket")
        if self.verbosity > 2:
            self.stdout.write("Retrieving objects now published in bucket")
        self.s3_obj_dict = {}
        self.s3_obj_dict = self.get_bucket_file_list()

    # Get a list of all the local files in our build directory
    logger.debug("Retrieving files built locally")
    if self.verbosity > 2:
        self.stdout.write("Retrieving files built locally")
    self.local_file_list = self.get_local_file_list()

    # Sync local files with s3 bucket
    logger.debug("Syncing local files with bucket")
    if self.verbosity > 2:
        self.stdout.write("Syncing local files with bucket")
    self.sync_with_s3()

    # Delete anything that's left in our keys dict
    if not self.dry_run and not self.no_delete:
        self.deleted_file_list = list(self.s3_obj_dict.keys())
        self.deleted_files = len(self.deleted_file_list)
        if self.deleted_files:
            logger.debug("Deleting %s keys" % self.deleted_files)
            if self.verbosity > 0:
                self.stdout.write("Deleting %s keys" % self.deleted_files)
            self.batch_delete_s3_objects(
                self.deleted_file_list,
                self.aws_bucket_name
            )

    # Run any post publish hooks on the views
    if not hasattr(settings, 'BAKERY_VIEWS'):
        raise CommandError(self.views_unconfig_msg)
    for view_str in settings.BAKERY_VIEWS:
        view = get_callable(view_str)()
        if hasattr(view, 'post_publish'):
            getattr(view, 'post_publish')(self.bucket)

    # We're finished, print the final output
    elapsed_time = time.time() - self.start_time
    msg = "Publish completed, %d uploaded and %d deleted files in %.2f seconds" % (
        self.uploaded_files,
        self.deleted_files,
        elapsed_time
    )
    logger.info(msg)
    if self.verbosity > 0:
        self.stdout.write(msg)

    if self.dry_run:
        logger.info("Publish executed with the --dry-run option. No content was changed on S3.")
        if self.verbosity > 0:
            self.stdout.write("Publish executed with the --dry-run option. No content was changed on S3.")
python
Sync files in the build directory to a specified S3 bucket
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L86-L173
datadesk/django-bakery
bakery/management/commands/publish.py
Command.set_options
def set_options(self, options):
    """
    Configure all the many options we'll need to make this happen.
    """
    self.verbosity = int(options.get('verbosity'))

    # Will we be gzipping?
    self.gzip = getattr(settings, 'BAKERY_GZIP', False)

    # And if so what content types will we be gzipping?
    self.gzip_content_types = getattr(
        settings,
        'GZIP_CONTENT_TYPES',
        DEFAULT_GZIP_CONTENT_TYPES
    )

    # What ACL (i.e. security permissions) will we be giving the files on S3?
    self.acl = getattr(settings, 'DEFAULT_ACL', self.DEFAULT_ACL)

    # Should we set cache-control headers?
    self.cache_control = getattr(settings, 'BAKERY_CACHE_CONTROL', {})

    # If the user specifies a build directory...
    if options.get('build_dir'):
        # ... validate that it is good.
        if not os.path.exists(options.get('build_dir')):
            raise CommandError(self.build_missing_msg)
        # Go ahead and use it
        self.build_dir = options.get("build_dir")
    # If the user does not specify a build dir...
    else:
        # Check if it is set in settings.py
        if not hasattr(settings, 'BUILD_DIR'):
            raise CommandError(self.build_unconfig_msg)
        # Then make sure it actually exists
        if not os.path.exists(settings.BUILD_DIR):
            raise CommandError(self.build_missing_msg)
        # Go ahead and use it
        self.build_dir = settings.BUILD_DIR

    # If the user provides a bucket name, use that.
    if options.get("aws_bucket_name"):
        self.aws_bucket_name = options.get("aws_bucket_name")
    else:
        # Otherwise try to find it in the settings
        if not hasattr(settings, 'AWS_BUCKET_NAME'):
            raise CommandError(self.bucket_unconfig_msg)
        self.aws_bucket_name = settings.AWS_BUCKET_NAME

    # The bucket prefix, if it exists
    self.aws_bucket_prefix = options.get("aws_bucket_prefix")

    # If the user sets the --force option
    if options.get('force'):
        self.force_publish = True
    else:
        self.force_publish = False

    # set the --dry-run option
    if options.get('dry_run'):
        self.dry_run = True
        if self.verbosity > 0:
            logger.info("Executing with the --dry-run option set.")
    else:
        self.dry_run = False

    self.no_delete = options.get('no_delete')
    self.no_pooling = options.get('no_pooling')
python
Configure all the many options we'll need to make this happen.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L175-L242
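Almost everything set_options reads comes from Django settings rather than command-line flags. A minimal settings sketch, assuming placeholder values (only the setting names are the ones set_options actually reads):

# settings.py -- hypothetical values; the setting names are the ones set_options reads
BUILD_DIR = '/tmp/build'                   # local directory produced by the build step
AWS_BUCKET_NAME = 'example-bucket'         # destination bucket (placeholder)
DEFAULT_ACL = 'public-read'                # ACL applied to every uploaded object
BAKERY_GZIP = True                         # gzip eligible content types on upload
GZIP_CONTENT_TYPES = ('text/html', 'text/css', 'application/javascript')
BAKERY_CACHE_CONTROL = {'text/html': 300}  # max-age in seconds, keyed by content type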
datadesk/django-bakery
bakery/management/commands/publish.py
Command.get_bucket_file_list
def get_bucket_file_list(self): """ Little utility method that handles pagination and returns all objects in given bucket. """ logger.debug("Retrieving bucket object list") paginator = self.s3_client.get_paginator('list_objects') options = { 'Bucket': self.aws_bucket_name } if self.aws_bucket_prefix: logger.debug("Adding prefix {} to bucket list as a filter".format(self.aws_bucket_prefix)) options['Prefix'] = self.aws_bucket_prefix page_iterator = paginator.paginate(**options) obj_dict = {} for page in page_iterator: obj_dict.update(get_bucket_page(page)) return obj_dict
python
def get_bucket_file_list(self): """ Little utility method that handles pagination and returns all objects in given bucket. """ logger.debug("Retrieving bucket object list") paginator = self.s3_client.get_paginator('list_objects') options = { 'Bucket': self.aws_bucket_name } if self.aws_bucket_prefix: logger.debug("Adding prefix {} to bucket list as a filter".format(self.aws_bucket_prefix)) options['Prefix'] = self.aws_bucket_prefix page_iterator = paginator.paginate(**options) obj_dict = {} for page in page_iterator: obj_dict.update(get_bucket_page(page)) return obj_dict
Little utility method that handles pagination and returns all objects in the given bucket.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L244-L264
datadesk/django-bakery
bakery/management/commands/publish.py
Command.get_local_file_list
def get_local_file_list(self): """ Walk the local build directory and create a list of relative and absolute paths to files. """ file_list = [] for (dirpath, dirnames, filenames) in os.walk(self.build_dir): for fname in filenames: # relative path, to sync with the S3 key local_key = os.path.join( os.path.relpath(dirpath, self.build_dir), fname ) if local_key.startswith('./'): local_key = local_key[2:] file_list.append(local_key) return file_list
python
def get_local_file_list(self): """ Walk the local build directory and create a list of relative and absolute paths to files. """ file_list = [] for (dirpath, dirnames, filenames) in os.walk(self.build_dir): for fname in filenames: # relative path, to sync with the S3 key local_key = os.path.join( os.path.relpath(dirpath, self.build_dir), fname ) if local_key.startswith('./'): local_key = local_key[2:] file_list.append(local_key) return file_list
Walk the local build directory and create a list of relative and absolute paths to files.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L266-L282
datadesk/django-bakery
bakery/management/commands/publish.py
Command.sync_with_s3
def sync_with_s3(self): """ Walk through our self.local_files list, and match them with the list of keys in the S3 bucket. """ # Create a list to put all the files we're going to update self.update_list = [] # Figure out which files need to be updated and upload all these files logger.debug("Comparing {} local files with {} bucket files".format( len(self.local_file_list), len(self.s3_obj_dict.keys()) )) if self.no_pooling: [self.compare_local_file(f) for f in self.local_file_list] else: cpu_count = multiprocessing.cpu_count() logger.debug("Pooling local file comparison on {} CPUs".format(cpu_count)) pool = ThreadPool(processes=cpu_count) pool.map(self.compare_local_file, self.local_file_list) logger.debug("Uploading {} new or updated files to bucket".format(len(self.update_list))) if self.no_pooling: [self.upload_to_s3(*u) for u in self.update_list] else: logger.debug("Pooling s3 uploads on {} CPUs".format(cpu_count)) pool = ThreadPool(processes=cpu_count) pool.map(self.pooled_upload_to_s3, self.update_list)
python
def sync_with_s3(self): """ Walk through our self.local_files list, and match them with the list of keys in the S3 bucket. """ # Create a list to put all the files we're going to update self.update_list = [] # Figure out which files need to be updated and upload all these files logger.debug("Comparing {} local files with {} bucket files".format( len(self.local_file_list), len(self.s3_obj_dict.keys()) )) if self.no_pooling: [self.compare_local_file(f) for f in self.local_file_list] else: cpu_count = multiprocessing.cpu_count() logger.debug("Pooling local file comparison on {} CPUs".format(cpu_count)) pool = ThreadPool(processes=cpu_count) pool.map(self.compare_local_file, self.local_file_list) logger.debug("Uploading {} new or updated files to bucket".format(len(self.update_list))) if self.no_pooling: [self.upload_to_s3(*u) for u in self.update_list] else: logger.debug("Pooling s3 uploads on {} CPUs".format(cpu_count)) pool = ThreadPool(processes=cpu_count) pool.map(self.pooled_upload_to_s3, self.update_list)
Walk through our self.local_files list, and match them with the list of keys in the S3 bucket.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L284-L311
datadesk/django-bakery
bakery/management/commands/publish.py
Command.get_md5
def get_md5(self, filename): """ Returns the md5 checksum of the provided file name. """ with open(filename, 'rb') as f: m = hashlib.md5(f.read()) return m.hexdigest()
python
def get_md5(self, filename): """ Returns the md5 checksum of the provided file name. """ with open(filename, 'rb') as f: m = hashlib.md5(f.read()) return m.hexdigest()
Returns the md5 checksum of the provided file name.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L313-L319
datadesk/django-bakery
bakery/management/commands/publish.py
Command.get_multipart_md5
def get_multipart_md5(self, filename, chunk_size=8 * 1024 * 1024): """ Returns the md5 checksum of the provided file name after breaking it into chunks. This is done to mirror the method used by Amazon S3 after a multipart upload. """ # Loop through the file contents ... md5s = [] with open(filename, 'rb') as fp: while True: # Break it into chunks data = fp.read(chunk_size) # Finish when there are no more if not data: break # Generate a md5 hash for each chunk md5s.append(hashlib.md5(data)) # Combine the chunks digests = b"".join(m.digest() for m in md5s) # Generate a new hash using them new_md5 = hashlib.md5(digests) # Create the ETag as Amazon will new_etag = '"%s-%s"' % (new_md5.hexdigest(), len(md5s)) # Trim it down and pass it back for comparison return new_etag.strip('"').strip("'")
python
def get_multipart_md5(self, filename, chunk_size=8 * 1024 * 1024): """ Returns the md5 checksum of the provided file name after breaking it into chunks. This is done to mirror the method used by Amazon S3 after a multipart upload. """ # Loop through the file contents ... md5s = [] with open(filename, 'rb') as fp: while True: # Break it into chunks data = fp.read(chunk_size) # Finish when there are no more if not data: break # Generate a md5 hash for each chunk md5s.append(hashlib.md5(data)) # Combine the chunks digests = b"".join(m.digest() for m in md5s) # Generate a new hash using them new_md5 = hashlib.md5(digests) # Create the ETag as Amazon will new_etag = '"%s-%s"' % (new_md5.hexdigest(), len(md5s)) # Trim it down and pass it back for comparison return new_etag.strip('"').strip("'")
Returns the md5 checksum of the provided file name after breaking it into chunks. This is done to mirror the method used by Amazon S3 after a multipart upload.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L321-L349
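A standalone sketch of the same chunked-ETag calculation, handy for checking a local file against an S3 ETag that contains a dash. It is a minimal restatement of the method above, not part of django-bakery's API; the 8 MB chunk size mirrors the part size assumed there.

import hashlib

def multipart_etag(path, chunk_size=8 * 1024 * 1024):
    # Hash each chunk, hash the concatenated digests (as S3 does for
    # multipart uploads), then append the part count.
    md5s = []
    with open(path, 'rb') as fp:
        while True:
            data = fp.read(chunk_size)
            if not data:
                break
            md5s.append(hashlib.md5(data))
    digests = b"".join(m.digest() for m in md5s)
    return "%s-%s" % (hashlib.md5(digests).hexdigest(), len(md5s))

# Compare against the bucket's ETag after stripping its surrounding quotes,
# e.g. s3_md5 = obj['ETag'].strip('"') and then s3_md5 == multipart_etag(path).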
datadesk/django-bakery
bakery/management/commands/publish.py
Command.compare_local_file
def compare_local_file(self, file_key): """ Compares a local version of a file with what's already published. If an update is needed, the file's key is added self.update_list. """ # Where is the file? file_path = os.path.join(self.build_dir, file_key) # If we're in force_publish mode just add it if self.force_publish: self.update_list.append((file_key, file_path)) # And quit now return # Does it exist in our s3 object list? if file_key in self.s3_obj_dict: # Get the md5 stored in Amazon's header s3_md5 = self.s3_obj_dict[file_key].get('ETag').strip('"').strip("'") # If there is a multipart ETag on S3, compare that to our local file after its chunked up. # We are presuming this file was uploaded in multiple parts. if "-" in s3_md5: local_md5 = self.get_multipart_md5(file_path) # Other, do it straight for the whole file else: local_md5 = self.get_md5(file_path) # If their md5 hexdigests match, do nothing if s3_md5 == local_md5: pass # If they don't match, we want to add it else: logger.debug("{} has changed".format(file_key)) self.update_list.append((file_key, file_path)) # Remove the file from the s3 dict, we don't need it anymore del self.s3_obj_dict[file_key] # If the file doesn't exist, queue it for creation else: logger.debug("{} has been added".format(file_key)) self.update_list.append((file_key, file_path))
python
def compare_local_file(self, file_key): """ Compares a local version of a file with what's already published. If an update is needed, the file's key is added self.update_list. """ # Where is the file? file_path = os.path.join(self.build_dir, file_key) # If we're in force_publish mode just add it if self.force_publish: self.update_list.append((file_key, file_path)) # And quit now return # Does it exist in our s3 object list? if file_key in self.s3_obj_dict: # Get the md5 stored in Amazon's header s3_md5 = self.s3_obj_dict[file_key].get('ETag').strip('"').strip("'") # If there is a multipart ETag on S3, compare that to our local file after its chunked up. # We are presuming this file was uploaded in multiple parts. if "-" in s3_md5: local_md5 = self.get_multipart_md5(file_path) # Other, do it straight for the whole file else: local_md5 = self.get_md5(file_path) # If their md5 hexdigests match, do nothing if s3_md5 == local_md5: pass # If they don't match, we want to add it else: logger.debug("{} has changed".format(file_key)) self.update_list.append((file_key, file_path)) # Remove the file from the s3 dict, we don't need it anymore del self.s3_obj_dict[file_key] # If the file doesn't exist, queue it for creation else: logger.debug("{} has been added".format(file_key)) self.update_list.append((file_key, file_path))
Compares a local version of a file with what's already published. If an update is needed, the file's key is added to self.update_list.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L351-L394
datadesk/django-bakery
bakery/management/commands/publish.py
Command.upload_to_s3
def upload_to_s3(self, key, filename): """ Set the content type and gzip headers if applicable and upload the item to S3 """ extra_args = {'ACL': self.acl} # determine the mimetype of the file guess = mimetypes.guess_type(filename) content_type = guess[0] encoding = guess[1] if content_type: extra_args['ContentType'] = content_type # add the gzip headers, if necessary if (self.gzip and content_type in self.gzip_content_types) or encoding == 'gzip': extra_args['ContentEncoding'] = 'gzip' # add the cache-control headers if necessary if content_type in self.cache_control: extra_args['CacheControl'] = ''.join(( 'max-age=', str(self.cache_control[content_type]) )) # access and write the contents from the file if not self.dry_run: logger.debug("Uploading %s" % filename) if self.verbosity > 0: self.stdout.write("Uploading %s" % filename) s3_obj = self.s3_resource.Object(self.aws_bucket_name, key) s3_obj.upload_file(filename, ExtraArgs=extra_args) # Update counts self.uploaded_files += 1 self.uploaded_file_list.append(filename)
python
def upload_to_s3(self, key, filename): """ Set the content type and gzip headers if applicable and upload the item to S3 """ extra_args = {'ACL': self.acl} # determine the mimetype of the file guess = mimetypes.guess_type(filename) content_type = guess[0] encoding = guess[1] if content_type: extra_args['ContentType'] = content_type # add the gzip headers, if necessary if (self.gzip and content_type in self.gzip_content_types) or encoding == 'gzip': extra_args['ContentEncoding'] = 'gzip' # add the cache-control headers if necessary if content_type in self.cache_control: extra_args['CacheControl'] = ''.join(( 'max-age=', str(self.cache_control[content_type]) )) # access and write the contents from the file if not self.dry_run: logger.debug("Uploading %s" % filename) if self.verbosity > 0: self.stdout.write("Uploading %s" % filename) s3_obj = self.s3_resource.Object(self.aws_bucket_name, key) s3_obj.upload_file(filename, ExtraArgs=extra_args) # Update counts self.uploaded_files += 1 self.uploaded_file_list.append(filename)
Set the content type and gzip headers if applicable and upload the item to S3
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L405-L440
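A condensed, hedged sketch of the header logic above applied to a single file with plain boto3 (bucket name, key and local path are placeholders; Object.upload_file does accept an ExtraArgs dict):

import mimetypes
import boto3

filename = '/tmp/build/index.html'        # placeholder local path
content_type, encoding = mimetypes.guess_type(filename)

extra_args = {'ACL': 'public-read'}
if content_type:
    extra_args['ContentType'] = content_type
if encoding == 'gzip':
    extra_args['ContentEncoding'] = 'gzip'   # only valid if the bytes really are gzipped
extra_args['CacheControl'] = 'max-age=300'   # would come from BAKERY_CACHE_CONTROL

s3 = boto3.resource('s3')
s3.Object('example-bucket', 'index.html').upload_file(filename, ExtraArgs=extra_args)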
datadesk/django-bakery
bakery/feeds.py
BuildableFeed._get_bakery_dynamic_attr
def _get_bakery_dynamic_attr(self, attname, obj, args=None, default=None): """ Allows subclasses to provide an attribute (say, 'foo') in three different ways: As a fixed class-level property or as a method foo(self) or foo(self, obj). The second argument argument 'obj' is the "subject" of the current Feed invocation. See the Django Feed documentation for details. This method was shamelessly stolen from the Feed class and extended with the ability to pass additional arguments to subclass methods. """ try: attr = getattr(self, attname) except AttributeError: return default if callable(attr) or args: args = args[:] if args else [] # Check co_argcount rather than try/excepting the function and # catching the TypeError, because something inside the function # may raise the TypeError. This technique is more accurate. try: code = six.get_function_code(attr) except AttributeError: code = six.get_function_code(attr.__call__) if code.co_argcount == 2 + len(args): # one argument is 'self' args.append(obj) return attr(*args) return attr
python
def _get_bakery_dynamic_attr(self, attname, obj, args=None, default=None): """ Allows subclasses to provide an attribute (say, 'foo') in three different ways: As a fixed class-level property or as a method foo(self) or foo(self, obj). The second argument argument 'obj' is the "subject" of the current Feed invocation. See the Django Feed documentation for details. This method was shamelessly stolen from the Feed class and extended with the ability to pass additional arguments to subclass methods. """ try: attr = getattr(self, attname) except AttributeError: return default if callable(attr) or args: args = args[:] if args else [] # Check co_argcount rather than try/excepting the function and # catching the TypeError, because something inside the function # may raise the TypeError. This technique is more accurate. try: code = six.get_function_code(attr) except AttributeError: code = six.get_function_code(attr.__call__) if code.co_argcount == 2 + len(args): # one argument is 'self' args.append(obj) return attr(*args) return attr
Allows subclasses to provide an attribute (say, 'foo') in three different ways: as a fixed class-level property or as a method foo(self) or foo(self, obj). The second argument 'obj' is the "subject" of the current Feed invocation. See the Django Feed documentation for details. This method was shamelessly stolen from the Feed class and extended with the ability to pass additional arguments to subclass methods.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/feeds.py#L23-L53
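The dispatch can be illustrated outside Django with a toy class: a plain attribute, a zero-argument method and a method taking obj are all accepted. The helper below is a simplified restatement (no six shim, bound methods only), not the library code:

class Demo:
    title_fixed = "plain value"

    def title_simple(self):
        return "computed without obj"

    def title_with_obj(self, obj):
        return "computed for %s" % obj

def dynamic_attr(instance, attname, obj, default=None):
    attr = getattr(instance, attname, default)
    if callable(attr):
        # self plus one extra positional argument means the method wants obj
        if attr.__func__.__code__.co_argcount == 2:
            return attr(obj)
        return attr()
    return attr

d = Demo()
print(dynamic_attr(d, 'title_fixed', 'story'))     # plain value
print(dynamic_attr(d, 'title_simple', 'story'))    # computed without obj
print(dynamic_attr(d, 'title_with_obj', 'story'))  # computed for story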
datadesk/django-bakery
bakery/views/dates.py
BuildableYearArchiveView.get_year
def get_year(self): """ Return the year from the database in the format expected by the URL. """ year = super(BuildableYearArchiveView, self).get_year() fmt = self.get_year_format() return date(int(year), 1, 1).strftime(fmt)
python
def get_year(self): """ Return the year from the database in the format expected by the URL. """ year = super(BuildableYearArchiveView, self).get_year() fmt = self.get_year_format() return date(int(year), 1, 1).strftime(fmt)
Return the year from the database in the format expected by the URL.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L75-L81
datadesk/django-bakery
bakery/views/dates.py
BuildableYearArchiveView.build_year
def build_year(self, dt): """ Build the page for the provided year. """ self.year = str(dt.year) logger.debug("Building %s" % self.year) self.request = self.create_request(self.get_url()) target_path = self.get_build_path() self.build_file(target_path, self.get_content())
python
def build_year(self, dt): """ Build the page for the provided year. """ self.year = str(dt.year) logger.debug("Building %s" % self.year) self.request = self.create_request(self.get_url()) target_path = self.get_build_path() self.build_file(target_path, self.get_content())
Build the page for the provided year.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L103-L111
datadesk/django-bakery
bakery/views/dates.py
BuildableYearArchiveView.build_dated_queryset
def build_dated_queryset(self): """ Build pages for all years in the queryset. """ qs = self.get_dated_queryset() years = self.get_date_list(qs) [self.build_year(dt) for dt in years]
python
def build_dated_queryset(self): """ Build pages for all years in the queryset. """ qs = self.get_dated_queryset() years = self.get_date_list(qs) [self.build_year(dt) for dt in years]
Build pages for all years in the queryset.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L113-L119
datadesk/django-bakery
bakery/views/dates.py
BuildableMonthArchiveView.get_month
def get_month(self): """ Return the month from the database in the format expected by the URL. """ year = super(BuildableMonthArchiveView, self).get_year() month = super(BuildableMonthArchiveView, self).get_month() fmt = self.get_month_format() return date(int(year), int(month), 1).strftime(fmt)
python
def get_month(self): """ Return the month from the database in the format expected by the URL. """ year = super(BuildableMonthArchiveView, self).get_year() month = super(BuildableMonthArchiveView, self).get_month() fmt = self.get_month_format() return date(int(year), int(month), 1).strftime(fmt)
Return the month from the database in the format expected by the URL.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L159-L166
datadesk/django-bakery
bakery/views/dates.py
BuildableMonthArchiveView.build_dated_queryset
def build_dated_queryset(self):
        """
        Build pages for all months in the queryset.
        """
        qs = self.get_dated_queryset()
        months = self.get_date_list(qs)
        [self.build_month(dt) for dt in months]
python
def build_dated_queryset(self):
        """
        Build pages for all months in the queryset.
        """
        qs = self.get_dated_queryset()
        months = self.get_date_list(qs)
        [self.build_month(dt) for dt in months]
Build pages for all months in the queryset.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L200-L206
datadesk/django-bakery
bakery/views/dates.py
BuildableMonthArchiveView.unbuild_month
def unbuild_month(self, dt):
        """
        Deletes the directory at self.get_build_path.
        """
        self.year = str(dt.year)
        self.month = str(dt.month)
        logger.debug("Unbuilding %s-%s" % (self.year, self.month))
        target_path = os.path.split(self.get_build_path())[0]
        if self.fs.exists(target_path):
            logger.debug("Removing {}".format(target_path))
            self.fs.removetree(target_path)
python
def unbuild_month(self, dt):
        """
        Deletes the directory at self.get_build_path.
        """
        self.year = str(dt.year)
        self.month = str(dt.month)
        logger.debug("Unbuilding %s-%s" % (self.year, self.month))
        target_path = os.path.split(self.get_build_path())[0]
        if self.fs.exists(target_path):
            logger.debug("Removing {}".format(target_path))
            self.fs.removetree(target_path)
Deletes the directory at self.get_build_path.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L208-L218
datadesk/django-bakery
bakery/views/dates.py
BuildableDayArchiveView.get_year
def get_year(self): """ Return the year from the database in the format expected by the URL. """ year = super(BuildableDayArchiveView, self).get_year() fmt = self.get_year_format() dt = date(int(year), 1, 1) return dt.strftime(fmt)
python
def get_year(self): """ Return the year from the database in the format expected by the URL. """ year = super(BuildableDayArchiveView, self).get_year() fmt = self.get_year_format() dt = date(int(year), 1, 1) return dt.strftime(fmt)
Return the year from the database in the format expected by the URL.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L239-L246
datadesk/django-bakery
bakery/views/dates.py
BuildableDayArchiveView.get_month
def get_month(self): """ Return the month from the database in the format expected by the URL. """ year = super(BuildableDayArchiveView, self).get_year() month = super(BuildableDayArchiveView, self).get_month() fmt = self.get_month_format() dt = date(int(year), int(month), 1) return dt.strftime(fmt)
python
def get_month(self): """ Return the month from the database in the format expected by the URL. """ year = super(BuildableDayArchiveView, self).get_year() month = super(BuildableDayArchiveView, self).get_month() fmt = self.get_month_format() dt = date(int(year), int(month), 1) return dt.strftime(fmt)
Return the month from the database in the format expected by the URL.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L248-L256
datadesk/django-bakery
bakery/views/dates.py
BuildableDayArchiveView.get_day
def get_day(self): """ Return the day from the database in the format expected by the URL. """ year = super(BuildableDayArchiveView, self).get_year() month = super(BuildableDayArchiveView, self).get_month() day = super(BuildableDayArchiveView, self).get_day() fmt = self.get_day_format() dt = date(int(year), int(month), int(day)) return dt.strftime(fmt)
python
def get_day(self): """ Return the day from the database in the format expected by the URL. """ year = super(BuildableDayArchiveView, self).get_year() month = super(BuildableDayArchiveView, self).get_month() day = super(BuildableDayArchiveView, self).get_day() fmt = self.get_day_format() dt = date(int(year), int(month), int(day)) return dt.strftime(fmt)
Return the day from the database in the format expected by the URL.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L258-L267
datadesk/django-bakery
bakery/views/dates.py
BuildableDayArchiveView.get_url
def get_url(self): """ The URL at which the detail page should appear. By default it is /archive/ + the year in self.year_format + the month in self.month_format + the day in the self.day_format. An example would be /archive/2016/01/01/. """ return os.path.join( '/archive', self.get_year(), self.get_month(), self.get_day() )
python
def get_url(self): """ The URL at which the detail page should appear. By default it is /archive/ + the year in self.year_format + the month in self.month_format + the day in the self.day_format. An example would be /archive/2016/01/01/. """ return os.path.join( '/archive', self.get_year(), self.get_month(), self.get_day() )
The URL at which the detail page should appear. By default it is /archive/ + the year in self.year_format + the month in self.month_format + the day in the self.day_format. An example would be /archive/2016/01/01/.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L269-L282
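A quick check of how the three formats compose into the docstring's example URL, assuming year_format='%Y', month_format='%m' and day_format='%d' (Django's stock month format is '%b', so '%m' is an assumption here):

import os
from datetime import date

dt = date(2016, 1, 1)
url = os.path.join('/archive', dt.strftime('%Y'), dt.strftime('%m'), dt.strftime('%d'))
print(url)  # /archive/2016/01/01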
datadesk/django-bakery
bakery/views/dates.py
BuildableDayArchiveView.get_build_path
def get_build_path(self): """ Used to determine where to build the page. Override this if you would like your page at a different location. By default it will be built at self.get_url() + "/index.html" """ target_path = path.join(settings.BUILD_DIR, self.get_url().lstrip('/')) if not self.fs.exists(target_path): logger.debug("Creating {}".format(target_path)) self.fs.makedirs(target_path) return os.path.join(target_path, 'index.html')
python
def get_build_path(self): """ Used to determine where to build the page. Override this if you would like your page at a different location. By default it will be built at self.get_url() + "/index.html" """ target_path = path.join(settings.BUILD_DIR, self.get_url().lstrip('/')) if not self.fs.exists(target_path): logger.debug("Creating {}".format(target_path)) self.fs.makedirs(target_path) return os.path.join(target_path, 'index.html')
Used to determine where to build the page. Override this if you would like your page at a different location. By default it will be built at self.get_url() + "/index.html"
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L284-L294
datadesk/django-bakery
bakery/views/dates.py
BuildableDayArchiveView.build_day
def build_day(self, dt): """ Build the page for the provided day. """ self.month = str(dt.month) self.year = str(dt.year) self.day = str(dt.day) logger.debug("Building %s-%s-%s" % (self.year, self.month, self.day)) self.request = self.create_request(self.get_url()) path = self.get_build_path() self.build_file(path, self.get_content())
python
def build_day(self, dt): """ Build the page for the provided day. """ self.month = str(dt.month) self.year = str(dt.year) self.day = str(dt.day) logger.debug("Building %s-%s-%s" % (self.year, self.month, self.day)) self.request = self.create_request(self.get_url()) path = self.get_build_path() self.build_file(path, self.get_content())
Build the page for the provided day.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L296-L306
datadesk/django-bakery
bakery/views/dates.py
BuildableDayArchiveView.build_dated_queryset
def build_dated_queryset(self):
        """
        Build pages for all days in the queryset.
        """
        qs = self.get_dated_queryset()
        days = self.get_date_list(qs, date_type='day')
        [self.build_day(dt) for dt in days]
python
def build_dated_queryset(self):
        """
        Build pages for all days in the queryset.
        """
        qs = self.get_dated_queryset()
        days = self.get_date_list(qs, date_type='day')
        [self.build_day(dt) for dt in days]
Build pages for all days in the queryset.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L308-L314
datadesk/django-bakery
bakery/management/commands/__init__.py
get_s3_client
def get_s3_client(): """ A DRY place to make sure AWS credentials in settings override environment based credentials. Boto3 will fall back to: http://boto3.readthedocs.io/en/latest/guide/configuration.html """ session_kwargs = {} if hasattr(settings, 'AWS_ACCESS_KEY_ID'): session_kwargs['aws_access_key_id'] = settings.AWS_ACCESS_KEY_ID if hasattr(settings, 'AWS_SECRET_ACCESS_KEY'): session_kwargs['aws_secret_access_key'] = settings.AWS_SECRET_ACCESS_KEY boto3.setup_default_session(**session_kwargs) s3_kwargs = {} if hasattr(settings, 'AWS_S3_ENDPOINT'): s3_kwargs['endpoint_url'] = settings.AWS_S3_ENDPOINT elif hasattr(settings, 'AWS_S3_HOST'): if hasattr(settings, 'AWS_S3_USE_SSL') and settings.AWS_S3_USE_SSL is False: protocol = "http://" else: protocol = "https://" s3_kwargs['endpoint_url'] = "{}{}".format( protocol, settings.AWS_S3_HOST ) if hasattr(settings, "AWS_REGION"): s3_kwargs['region_name'] = settings.AWS_REGION s3_client = boto3.client('s3', **s3_kwargs) s3_resource = boto3.resource('s3', **s3_kwargs) return s3_client, s3_resource
python
def get_s3_client(): """ A DRY place to make sure AWS credentials in settings override environment based credentials. Boto3 will fall back to: http://boto3.readthedocs.io/en/latest/guide/configuration.html """ session_kwargs = {} if hasattr(settings, 'AWS_ACCESS_KEY_ID'): session_kwargs['aws_access_key_id'] = settings.AWS_ACCESS_KEY_ID if hasattr(settings, 'AWS_SECRET_ACCESS_KEY'): session_kwargs['aws_secret_access_key'] = settings.AWS_SECRET_ACCESS_KEY boto3.setup_default_session(**session_kwargs) s3_kwargs = {} if hasattr(settings, 'AWS_S3_ENDPOINT'): s3_kwargs['endpoint_url'] = settings.AWS_S3_ENDPOINT elif hasattr(settings, 'AWS_S3_HOST'): if hasattr(settings, 'AWS_S3_USE_SSL') and settings.AWS_S3_USE_SSL is False: protocol = "http://" else: protocol = "https://" s3_kwargs['endpoint_url'] = "{}{}".format( protocol, settings.AWS_S3_HOST ) if hasattr(settings, "AWS_REGION"): s3_kwargs['region_name'] = settings.AWS_REGION s3_client = boto3.client('s3', **s3_kwargs) s3_resource = boto3.resource('s3', **s3_kwargs) return s3_client, s3_resource
A DRY place to make sure AWS credentials in settings override environment based credentials. Boto3 will fall back to: http://boto3.readthedocs.io/en/latest/guide/configuration.html
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/__init__.py#L8-L38
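A hedged settings sketch for the branches above, e.g. when pointing at an S3-compatible store; only the setting names come from get_s3_client, the values are placeholders:

# settings.py -- placeholder values
AWS_ACCESS_KEY_ID = '...'                        # overrides environment credentials
AWS_SECRET_ACCESS_KEY = '...'
AWS_S3_ENDPOINT = 'https://objects.example.com'  # wins over AWS_S3_HOST if both are set
AWS_REGION = 'us-east-1'

# elsewhere in the commands:
# s3_client, s3_resource = get_s3_client()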
datadesk/django-bakery
bakery/management/commands/__init__.py
get_bucket_page
def get_bucket_page(page): """ Returns all the keys in a s3 bucket paginator page. """ key_list = page.get('Contents', []) logger.debug("Retrieving page with {} keys".format( len(key_list), )) return dict((k.get('Key'), k) for k in key_list)
python
def get_bucket_page(page): """ Returns all the keys in a s3 bucket paginator page. """ key_list = page.get('Contents', []) logger.debug("Retrieving page with {} keys".format( len(key_list), )) return dict((k.get('Key'), k) for k in key_list)
Returns all the keys in a s3 bucket paginator page.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/__init__.py#L41-L49
datadesk/django-bakery
bakery/management/commands/__init__.py
get_all_objects_in_bucket
def get_all_objects_in_bucket( aws_bucket_name, s3_client=None, max_keys=1000 ): """ Little utility method that handles pagination and returns all objects in given bucket. """ logger.debug("Retrieving bucket object list") if not s3_client: s3_client, s3_resource = get_s3_client() obj_dict = {} paginator = s3_client.get_paginator('list_objects') page_iterator = paginator.paginate(Bucket=aws_bucket_name) for page in page_iterator: key_list = page.get('Contents', []) logger.debug("Loading page with {} keys".format(len(key_list))) for obj in key_list: obj_dict[obj.get('Key')] = obj return obj_dict
python
def get_all_objects_in_bucket( aws_bucket_name, s3_client=None, max_keys=1000 ): """ Little utility method that handles pagination and returns all objects in given bucket. """ logger.debug("Retrieving bucket object list") if not s3_client: s3_client, s3_resource = get_s3_client() obj_dict = {} paginator = s3_client.get_paginator('list_objects') page_iterator = paginator.paginate(Bucket=aws_bucket_name) for page in page_iterator: key_list = page.get('Contents', []) logger.debug("Loading page with {} keys".format(len(key_list))) for obj in key_list: obj_dict[obj.get('Key')] = obj return obj_dict
Little utility method that handles pagination and returns all objects in the given bucket.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/__init__.py#L52-L74
datadesk/django-bakery
bakery/management/commands/__init__.py
batch_delete_s3_objects
def batch_delete_s3_objects(
    keys,
    aws_bucket_name,
    chunk_size=100,
    s3_client=None
):
    """
    Utility method that batch deletes objects in the given bucket.
    """
    if s3_client is None:
        s3_client, s3_resource = get_s3_client()
    key_chunks = []
    for i in range(0, len(keys), chunk_size):
        chunk = []
        for key in list(keys)[i:i + chunk_size]:
            chunk.append({'Key': key})
        key_chunks.append(chunk)
    for chunk in key_chunks:
        s3_client.delete_objects(
            Bucket=aws_bucket_name,
            Delete={'Objects': chunk}
        )
python
def batch_delete_s3_objects(
    keys,
    aws_bucket_name,
    chunk_size=100,
    s3_client=None
):
    """
    Utility method that batch deletes objects in the given bucket.
    """
    if s3_client is None:
        s3_client, s3_resource = get_s3_client()
    key_chunks = []
    for i in range(0, len(keys), chunk_size):
        chunk = []
        for key in list(keys)[i:i + chunk_size]:
            chunk.append({'Key': key})
        key_chunks.append(chunk)
    for chunk in key_chunks:
        s3_client.delete_objects(
            Bucket=aws_bucket_name,
            Delete={'Objects': chunk}
        )
Utility method that batch deletes objects in the given bucket.
https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/__init__.py#L77-L99
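A standalone sketch of the same batched delete (bucket and keys are placeholders); delete_objects accepts at most 1000 keys per request, which is why the keys are chunked at all:

import boto3

def delete_in_chunks(keys, bucket, chunk_size=100):
    s3 = boto3.client('s3')
    keys = list(keys)
    for i in range(0, len(keys), chunk_size):
        # delete_objects caps each request at 1000 keys, so send small batches
        chunk = [{'Key': k} for k in keys[i:i + chunk_size]]
        s3.delete_objects(Bucket=bucket, Delete={'Objects': chunk})

# delete_in_chunks(['feed.xml', 'old/index.html'], 'example-bucket')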
python-parsy/parsy
examples/proto3.py
lexeme
def lexeme(p): """ From a parser (or string), make a parser that consumes whitespace on either side. """ if isinstance(p, str): p = string(p) return regex(r'\s*') >> p << regex(r'\s*')
python
def lexeme(p): """ From a parser (or string), make a parser that consumes whitespace on either side. """ if isinstance(p, str): p = string(p) return regex(r'\s*') >> p << regex(r'\s*')
From a parser (or string), make a parser that consumes whitespace on either side.
https://github.com/python-parsy/parsy/blob/04216ed25b6cdb389a36e16998592ba476237f86/examples/proto3.py#L39-L46
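A short usage sketch of the whitespace-tolerant helper with parsy combinators; the tiny assignment grammar is invented for the example:

from parsy import regex, seq, string

def lexeme(p):
    if isinstance(p, str):
        p = string(p)
    return regex(r'\s*') >> p << regex(r'\s*')

identifier = lexeme(regex(r'[A-Za-z_]\w*'))
number = lexeme(regex(r'[0-9]+').map(int))
assignment = seq(identifier, lexeme('=') >> number)

print(assignment.parse('  answer =  42 '))  # ['answer', 42]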
python-parsy/parsy
examples/proto3.py
is_present
def is_present(p): """ Given a parser or string, make a parser that returns True if the parser matches, False otherwise """ return lexeme(p).optional().map(lambda v: False if v is None else True)
python
def is_present(p): """ Given a parser or string, make a parser that returns True if the parser matches, False otherwise """ return lexeme(p).optional().map(lambda v: False if v is None else True)
Given a parser or string, make a parser that returns True if the parser matches, False otherwise
https://github.com/python-parsy/parsy/blob/04216ed25b6cdb389a36e16998592ba476237f86/examples/proto3.py#L49-L54
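Building on lexeme, a compact demonstration of turning an optional keyword into a boolean, roughly how the proto3 grammar treats the repeated modifier (the map lambda is condensed but equivalent):

from parsy import regex, string

def lexeme(p):
    p = string(p) if isinstance(p, str) else p
    return regex(r'\s*') >> p << regex(r'\s*')

def is_present(p):
    return lexeme(p).optional().map(lambda v: v is not None)

repeated = is_present('repeated')
print(repeated.parse_partial('repeated string tags = 1;')[0])  # True
print(repeated.parse_partial('string name = 2;')[0])           # False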
python-parsy/parsy
src/parsy/__init__.py
Parser.parse
def parse(self, stream): """Parse a string or list of tokens and return the result or raise a ParseError.""" (result, _) = (self << eof).parse_partial(stream) return result
python
def parse(self, stream): """Parse a string or list of tokens and return the result or raise a ParseError.""" (result, _) = (self << eof).parse_partial(stream) return result
Parse a string or list of tokens and return the result or raise a ParseError.
https://github.com/python-parsy/parsy/blob/04216ed25b6cdb389a36e16998592ba476237f86/src/parsy/__init__.py#L88-L91
python-parsy/parsy
src/parsy/__init__.py
Parser.parse_partial
def parse_partial(self, stream): """ Parse the longest possible prefix of a given string. Return a tuple of the result and the rest of the string, or raise a ParseError. """ result = self(stream, 0) if result.status: return (result.value, stream[result.index:]) else: raise ParseError(result.expected, stream, result.furthest)
python
def parse_partial(self, stream): """ Parse the longest possible prefix of a given string. Return a tuple of the result and the rest of the string, or raise a ParseError. """ result = self(stream, 0) if result.status: return (result.value, stream[result.index:]) else: raise ParseError(result.expected, stream, result.furthest)
Parse the longest possible prefix of a given string. Return a tuple of the result and the rest of the string, or raise a ParseError.
https://github.com/python-parsy/parsy/blob/04216ed25b6cdb389a36e16998592ba476237f86/src/parsy/__init__.py#L93-L104
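The difference between the two entry points in one small example:

from parsy import regex

integer = regex(r'[0-9]+').map(int)

print(integer.parse_partial('42 apples'))  # (42, ' apples') -- value plus the unconsumed rest
print(integer.parse('42'))                 # 42
# integer.parse('42 apples') raises ParseError, because parse() requires
# the whole stream to be consumed.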
scrapinghub/flatson
flatson/flatson.py
extract_key_values
def extract_key_values(array_value, separators=(';', ',', ':'), **kwargs): """Serialize array of objects with simple key-values """ items_sep, fields_sep, keys_sep = separators return items_sep.join(fields_sep.join(keys_sep.join(x) for x in sorted(it.items())) for it in array_value)
python
def extract_key_values(array_value, separators=(';', ',', ':'), **kwargs): """Serialize array of objects with simple key-values """ items_sep, fields_sep, keys_sep = separators return items_sep.join(fields_sep.join(keys_sep.join(x) for x in sorted(it.items())) for it in array_value)
Serialize array of objects with simple key-values
https://github.com/scrapinghub/flatson/blob/dcbcea32ad6d4df1df85fff8366bce40438d469a/flatson/flatson.py#L48-L53
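A concrete call showing the three separator roles (items ';', fields ',', key/value ':'); the sample records are invented and the import path simply mirrors the file shown above:

from flatson.flatson import extract_key_values

variants = [
    {'color': 'blue', 'size': 'L'},
    {'color': 'red', 'size': 'S'},
]
print(extract_key_values(variants))
# color:blue,size:L;color:red,size:S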
scrapinghub/flatson
flatson/flatson.py
Flatson.from_schemafile
def from_schemafile(cls, schemafile): """Create a Flatson instance from a schemafile """ with open(schemafile) as f: return cls(json.load(f))
python
def from_schemafile(cls, schemafile): """Create a Flatson instance from a schemafile """ with open(schemafile) as f: return cls(json.load(f))
Create a Flatson instance from a schemafile
https://github.com/scrapinghub/flatson/blob/dcbcea32ad6d4df1df85fff8366bce40438d469a/flatson/flatson.py#L93-L97
scrapinghub/flatson
flatson/flatson.py
Flatson.register_serialization_method
def register_serialization_method(self, name, serialize_func):
        """Register a custom serialization method that can be used
        via schema configuration
        """
        if name in self._default_serialization_methods:
            raise ValueError("Can't replace original %s serialization method" % name)
        self._serialization_methods[name] = serialize_func
python
def register_serialization_method(self, name, serialize_func):
        """Register a custom serialization method that can be used
        via schema configuration
        """
        if name in self._default_serialization_methods:
            raise ValueError("Can't replace original %s serialization method" % name)
        self._serialization_methods[name] = serialize_func
Register a custom serialization method that can be used via schema configuration
https://github.com/scrapinghub/flatson/blob/dcbcea32ad6d4df1df85fff8366bce40438d469a/flatson/flatson.py#L123-L129
scrapinghub/flatson
flatson/flatson.py
Flatson.flatten
def flatten(self, obj): """Return a list with the field values """ return [self._serialize(f, obj) for f in self.fields]
python
def flatten(self, obj): """Return a list with the field values """ return [self._serialize(f, obj) for f in self.fields]
Return a list with the field values
https://github.com/scrapinghub/flatson/blob/dcbcea32ad6d4df1df85fff8366bce40438d469a/flatson/flatson.py#L131-L134
scrapinghub/flatson
flatson/flatson.py
Flatson.flatten_dict
def flatten_dict(self, obj): """Return an OrderedDict dict preserving order of keys in fieldnames """ return OrderedDict(zip(self.fieldnames, self.flatten(obj)))
python
def flatten_dict(self, obj): """Return an OrderedDict dict preserving order of keys in fieldnames """ return OrderedDict(zip(self.fieldnames, self.flatten(obj)))
Return an OrderedDict preserving the order of keys in fieldnames
https://github.com/scrapinghub/flatson/blob/dcbcea32ad6d4df1df85fff8366bce40438d469a/flatson/flatson.py#L136-L139
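A hedged end-to-end sketch of the Flatson methods listed above; the JSON-schema dict and the dotted field naming are assumptions made for the illustration, not taken from the project's docs:

from flatson import Flatson

schema = {
    'type': 'object',
    'properties': {
        'name': {'type': 'string'},
        'address': {
            'type': 'object',
            'properties': {'city': {'type': 'string'}},
        },
    },
}

f = Flatson(schema)
item = {'name': 'Claudia', 'address': {'city': 'Asuncion'}}
print(f.fieldnames)          # expected something like ['address.city', 'name']
print(f.flatten(item))       # values in the same order as fieldnames
print(f.flatten_dict(item))  # OrderedDict keyed by fieldnames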
gmr/queries
queries/pool.py
Connection.close
def close(self): """Close the connection :raises: ConnectionBusyError """ LOGGER.debug('Connection %s closing', self.id) if self.busy: raise ConnectionBusyError(self) with self._lock: if not self.handle.closed: try: self.handle.close() except psycopg2.InterfaceError as error: LOGGER.error('Error closing socket: %s', error)
python
def close(self): """Close the connection :raises: ConnectionBusyError """ LOGGER.debug('Connection %s closing', self.id) if self.busy: raise ConnectionBusyError(self) with self._lock: if not self.handle.closed: try: self.handle.close() except psycopg2.InterfaceError as error: LOGGER.error('Error closing socket: %s', error)
Close the connection :raises: ConnectionBusyError
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L33-L47
gmr/queries
queries/pool.py
Connection.busy
def busy(self): """Return if the connection is currently executing a query or is locked by a session that still exists. :rtype: bool """ if self.handle.isexecuting(): return True elif self.used_by is None: return False return not self.used_by() is None
python
def busy(self): """Return if the connection is currently executing a query or is locked by a session that still exists. :rtype: bool """ if self.handle.isexecuting(): return True elif self.used_by is None: return False return not self.used_by() is None
Return if the connection is currently executing a query or is locked by a session that still exists. :rtype: bool
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L59-L70
gmr/queries
queries/pool.py
Connection.free
def free(self): """Remove the lock on the connection if the connection is not active :raises: ConnectionBusyError """ LOGGER.debug('Connection %s freeing', self.id) if self.handle.isexecuting(): raise ConnectionBusyError(self) with self._lock: self.used_by = None LOGGER.debug('Connection %s freed', self.id)
python
def free(self): """Remove the lock on the connection if the connection is not active :raises: ConnectionBusyError """ LOGGER.debug('Connection %s freeing', self.id) if self.handle.isexecuting(): raise ConnectionBusyError(self) with self._lock: self.used_by = None LOGGER.debug('Connection %s freed', self.id)
Remove the lock on the connection if the connection is not active :raises: ConnectionBusyError
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L81-L92
gmr/queries
queries/pool.py
Connection.lock
def lock(self, session): """Lock the connection, ensuring that it is not busy and storing a weakref for the session. :param queries.Session session: The session to lock the connection with :raises: ConnectionBusyError """ if self.busy: raise ConnectionBusyError(self) with self._lock: self.used_by = weakref.ref(session) LOGGER.debug('Connection %s locked', self.id)
python
def lock(self, session): """Lock the connection, ensuring that it is not busy and storing a weakref for the session. :param queries.Session session: The session to lock the connection with :raises: ConnectionBusyError """ if self.busy: raise ConnectionBusyError(self) with self._lock: self.used_by = weakref.ref(session) LOGGER.debug('Connection %s locked', self.id)
Lock the connection, ensuring that it is not busy and storing a weakref for the session. :param queries.Session session: The session to lock the connection with :raises: ConnectionBusyError
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L103-L115
gmr/queries
queries/pool.py
Pool.add
def add(self, connection): """Add a new connection to the pool :param connection: The connection to add to the pool :type connection: psycopg2.extensions.connection :raises: PoolFullError """ if id(connection) in self.connections: raise ValueError('Connection already exists in pool') if len(self.connections) == self.max_size: LOGGER.warning('Race condition found when adding new connection') try: connection.close() except (psycopg2.Error, psycopg2.Warning) as error: LOGGER.error('Error closing the conn that cant be used: %s', error) raise PoolFullError(self) with self._lock: self.connections[id(connection)] = Connection(connection) LOGGER.debug('Pool %s added connection %s', self.id, id(connection))
python
def add(self, connection): """Add a new connection to the pool :param connection: The connection to add to the pool :type connection: psycopg2.extensions.connection :raises: PoolFullError """ if id(connection) in self.connections: raise ValueError('Connection already exists in pool') if len(self.connections) == self.max_size: LOGGER.warning('Race condition found when adding new connection') try: connection.close() except (psycopg2.Error, psycopg2.Warning) as error: LOGGER.error('Error closing the conn that cant be used: %s', error) raise PoolFullError(self) with self._lock: self.connections[id(connection)] = Connection(connection) LOGGER.debug('Pool %s added connection %s', self.id, id(connection))
Add a new connection to the pool :param connection: The connection to add to the pool :type connection: psycopg2.extensions.connection :raises: PoolFullError
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L154-L175
gmr/queries
queries/pool.py
Pool.busy_connections
def busy_connections(self): """Return a list of active/busy connections :rtype: list """ return [c for c in self.connections.values() if c.busy and not c.closed]
python
def busy_connections(self): """Return a list of active/busy connections :rtype: list """ return [c for c in self.connections.values() if c.busy and not c.closed]
Return a list of active/busy connections :rtype: list
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L178-L185
gmr/queries
queries/pool.py
Pool.clean
def clean(self): """Clean the pool by removing any closed connections and if the pool's idle has exceeded its idle TTL, remove all connections. """ LOGGER.debug('Cleaning the pool') for connection in [self.connections[k] for k in self.connections if self.connections[k].closed]: LOGGER.debug('Removing %s', connection.id) self.remove(connection.handle) if self.idle_duration > self.idle_ttl: self.close() LOGGER.debug('Pool %s cleaned', self.id)
python
def clean(self): """Clean the pool by removing any closed connections and if the pool's idle has exceeded its idle TTL, remove all connections. """ LOGGER.debug('Cleaning the pool') for connection in [self.connections[k] for k in self.connections if self.connections[k].closed]: LOGGER.debug('Removing %s', connection.id) self.remove(connection.handle) if self.idle_duration > self.idle_ttl: self.close() LOGGER.debug('Pool %s cleaned', self.id)
Clean the pool by removing any closed connections and if the pool's idle has exceeded its idle TTL, remove all connections.
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L187-L201
gmr/queries
queries/pool.py
Pool.close
def close(self): """Close the pool by closing and removing all of the connections""" for cid in list(self.connections.keys()): self.remove(self.connections[cid].handle) LOGGER.debug('Pool %s closed', self.id)
python
def close(self): """Close the pool by closing and removing all of the connections""" for cid in list(self.connections.keys()): self.remove(self.connections[cid].handle) LOGGER.debug('Pool %s closed', self.id)
Close the pool by closing and removing all of the connections
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L203-L207
gmr/queries
queries/pool.py
Pool.free
def free(self, connection): """Free the connection from use by the session that was using it. :param connection: The connection to free :type connection: psycopg2.extensions.connection :raises: ConnectionNotFoundError """ LOGGER.debug('Pool %s freeing connection %s', self.id, id(connection)) try: self.connection_handle(connection).free() except KeyError: raise ConnectionNotFoundError(self.id, id(connection)) if self.idle_connections == list(self.connections.values()): with self._lock: self.idle_start = self.time_method() LOGGER.debug('Pool %s freed connection %s', self.id, id(connection))
python
def free(self, connection): """Free the connection from use by the session that was using it. :param connection: The connection to free :type connection: psycopg2.extensions.connection :raises: ConnectionNotFoundError """ LOGGER.debug('Pool %s freeing connection %s', self.id, id(connection)) try: self.connection_handle(connection).free() except KeyError: raise ConnectionNotFoundError(self.id, id(connection)) if self.idle_connections == list(self.connections.values()): with self._lock: self.idle_start = self.time_method() LOGGER.debug('Pool %s freed connection %s', self.id, id(connection))
Free the connection from use by the session that was using it. :param connection: The connection to free :type connection: psycopg2.extensions.connection :raises: ConnectionNotFoundError
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L237-L254
gmr/queries
queries/pool.py
Pool.get
def get(self, session): """Return an idle connection and assign the session to the connection :param queries.Session session: The session to assign :rtype: psycopg2.extensions.connection :raises: NoIdleConnectionsError """ idle = self.idle_connections if idle: connection = idle.pop(0) connection.lock(session) if self.idle_start: with self._lock: self.idle_start = None return connection.handle raise NoIdleConnectionsError(self.id)
python
def get(self, session): """Return an idle connection and assign the session to the connection :param queries.Session session: The session to assign :rtype: psycopg2.extensions.connection :raises: NoIdleConnectionsError """ idle = self.idle_connections if idle: connection = idle.pop(0) connection.lock(session) if self.idle_start: with self._lock: self.idle_start = None return connection.handle raise NoIdleConnectionsError(self.id)
Return an idle connection and assign the session to the connection :param queries.Session session: The session to assign :rtype: psycopg2.extensions.connection :raises: NoIdleConnectionsError
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L256-L272
gmr/queries
queries/pool.py
Pool.idle_connections
def idle_connections(self): """Return a list of idle connections :rtype: list """ return [c for c in self.connections.values() if not c.busy and not c.closed]
python
def idle_connections(self): """Return a list of idle connections :rtype: list """ return [c for c in self.connections.values() if not c.busy and not c.closed]
Return a list of idle connections :rtype: list
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L284-L291
gmr/queries
queries/pool.py
Pool.lock
def lock(self, connection, session): """Explicitly lock the specified connection :type connection: psycopg2.extensions.connection :param connection: The connection to lock :param queries.Session session: The session to hold the lock """ cid = id(connection) try: self.connection_handle(connection).lock(session) except KeyError: raise ConnectionNotFoundError(self.id, cid) else: if self.idle_start: with self._lock: self.idle_start = None LOGGER.debug('Pool %s locked connection %s', self.id, cid)
python
def lock(self, connection, session): """Explicitly lock the specified connection :type connection: psycopg2.extensions.connection :param connection: The connection to lock :param queries.Session session: The session to hold the lock """ cid = id(connection) try: self.connection_handle(connection).lock(session) except KeyError: raise ConnectionNotFoundError(self.id, cid) else: if self.idle_start: with self._lock: self.idle_start = None LOGGER.debug('Pool %s locked connection %s', self.id, cid)
Explicitly lock the specified connection :type connection: psycopg2.extensions.connection :param connection: The connection to lock :param queries.Session session: The session to hold the lock
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L314-L331
gmr/queries
queries/pool.py
Pool.remove
def remove(self, connection): """Remove the connection from the pool :param connection: The connection to remove :type connection: psycopg2.extensions.connection :raises: ConnectionNotFoundError :raises: ConnectionBusyError """ cid = id(connection) if cid not in self.connections: raise ConnectionNotFoundError(self.id, cid) self.connection_handle(connection).close() with self._lock: del self.connections[cid] LOGGER.debug('Pool %s removed connection %s', self.id, cid)
python
def remove(self, connection): """Remove the connection from the pool :param connection: The connection to remove :type connection: psycopg2.extensions.connection :raises: ConnectionNotFoundError :raises: ConnectionBusyError """ cid = id(connection) if cid not in self.connections: raise ConnectionNotFoundError(self.id, cid) self.connection_handle(connection).close() with self._lock: del self.connections[cid] LOGGER.debug('Pool %s removed connection %s', self.id, cid)
Remove the connection from the pool :param connection: The connection to remove :type connection: psycopg2.extensions.connection :raises: ConnectionNotFoundError :raises: ConnectionBusyError
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L342-L357
gmr/queries
queries/pool.py
Pool.report
def report(self): """Return a report about the pool state and configuration. :rtype: dict """ return { 'connections': { 'busy': len(self.busy_connections), 'closed': len(self.closed_connections), 'executing': len(self.executing_connections), 'idle': len(self.idle_connections), 'locked': len(self.busy_connections) }, 'exceptions': sum([c.exceptions for c in self.connections.values()]), 'executions': sum([c.executions for c in self.connections.values()]), 'full': self.is_full, 'idle': { 'duration': self.idle_duration, 'ttl': self.idle_ttl }, 'max_size': self.max_size }
python
def report(self): """Return a report about the pool state and configuration. :rtype: dict """ return { 'connections': { 'busy': len(self.busy_connections), 'closed': len(self.closed_connections), 'executing': len(self.executing_connections), 'idle': len(self.idle_connections), 'locked': len(self.busy_connections) }, 'exceptions': sum([c.exceptions for c in self.connections.values()]), 'executions': sum([c.executions for c in self.connections.values()]), 'full': self.is_full, 'idle': { 'duration': self.idle_duration, 'ttl': self.idle_ttl }, 'max_size': self.max_size }
Return a report about the pool state and configuration. :rtype: dict
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L359-L383
gmr/queries
queries/pool.py
Pool.shutdown
def shutdown(self): """Forcefully shutdown the entire pool, closing all non-executing connections. :raises: ConnectionBusyError """ with self._lock: for cid in list(self.connections.keys()): if self.connections[cid].executing: raise ConnectionBusyError(cid) if self.connections[cid].locked: self.connections[cid].free() self.connections[cid].close() del self.connections[cid]
python
def shutdown(self): """Forcefully shutdown the entire pool, closing all non-executing connections. :raises: ConnectionBusyError """ with self._lock: for cid in list(self.connections.keys()): if self.connections[cid].executing: raise ConnectionBusyError(cid) if self.connections[cid].locked: self.connections[cid].free() self.connections[cid].close() del self.connections[cid]
Forcefully shutdown the entire pool, closing all non-executing connections. :raises: ConnectionBusyError
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L385-L399
gmr/queries
queries/pool.py
PoolManager.add
def add(cls, pid, connection): """Add a new connection and session to a pool. :param str pid: The pool id :type connection: psycopg2.extensions.connection :param connection: The connection to add to the pool """ with cls._lock: cls._ensure_pool_exists(pid) cls._pools[pid].add(connection)
python
def add(cls, pid, connection): """Add a new connection and session to a pool. :param str pid: The pool id :type connection: psycopg2.extensions.connection :param connection: The connection to add to the pool """ with cls._lock: cls._ensure_pool_exists(pid) cls._pools[pid].add(connection)
Add a new connection and session to a pool. :param str pid: The pool id :type connection: psycopg2.extensions.connection :param connection: The connection to add to the pool
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L454-L464
gmr/queries
queries/pool.py
PoolManager.clean
def clean(cls, pid): """Clean the specified pool, removing any closed connections or stale locks. :param str pid: The pool id to clean """ with cls._lock: try: cls._ensure_pool_exists(pid) except KeyError: LOGGER.debug('Pool clean invoked against missing pool %s', pid) return cls._pools[pid].clean() cls._maybe_remove_pool(pid)
python
def clean(cls, pid): """Clean the specified pool, removing any closed connections or stale locks. :param str pid: The pool id to clean """ with cls._lock: try: cls._ensure_pool_exists(pid) except KeyError: LOGGER.debug('Pool clean invoked against missing pool %s', pid) return cls._pools[pid].clean() cls._maybe_remove_pool(pid)
Clean the specified pool, removing any closed connections or stale locks. :param str pid: The pool id to clean
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L467-L481
gmr/queries
queries/pool.py
PoolManager.create
def create(cls, pid, idle_ttl=DEFAULT_IDLE_TTL, max_size=DEFAULT_MAX_SIZE,
               time_method=None):
        """Create a new pool, with the ability to pass in values to override
        the default idle TTL and the default maximum size.

        A pool's idle TTL defines the amount of time that a pool can be open
        without any sessions before it is removed.

        A pool's max size defines the maximum number of connections that can
        be added to the pool to prevent unbounded open connections.

        :param str pid: The pool ID
        :param int idle_ttl: Time in seconds for the idle TTL
        :param int max_size: The maximum pool size
        :param callable time_method: Override the use of :py:meth:`time.time`
            method for time values.
        :raises: KeyError

        """
        if pid in cls._pools:
            raise KeyError('Pool %s already exists' % pid)
        with cls._lock:
            LOGGER.debug("Creating Pool: %s (%i/%i)", pid, idle_ttl, max_size)
            cls._pools[pid] = Pool(pid, idle_ttl, max_size, time_method)
python
def create(cls, pid, idle_ttl=DEFAULT_IDLE_TTL, max_size=DEFAULT_MAX_SIZE,
               time_method=None):
        """Create a new pool, with the ability to pass in values to override
        the default idle TTL and the default maximum size.

        A pool's idle TTL defines the amount of time that a pool can be open
        without any sessions before it is removed.

        A pool's max size defines the maximum number of connections that can
        be added to the pool to prevent unbounded open connections.

        :param str pid: The pool ID
        :param int idle_ttl: Time in seconds for the idle TTL
        :param int max_size: The maximum pool size
        :param callable time_method: Override the use of :py:meth:`time.time`
            method for time values.
        :raises: KeyError

        """
        if pid in cls._pools:
            raise KeyError('Pool %s already exists' % pid)
        with cls._lock:
            LOGGER.debug("Creating Pool: %s (%i/%i)", pid, idle_ttl, max_size)
            cls._pools[pid] = Pool(pid, idle_ttl, max_size, time_method)
Create a new pool, with the ability to pass in values to override the default idle TTL and the default maximum size. A pool's idle TTL defines the amount of time that a pool can be open without any sessions before it is removed. A pool's max size defines the maximum number of connections that can be added to the pool to prevent unbounded open connections. :param str pid: The pool ID :param int idle_ttl: Time in seconds for the idle TTL :param int max_size: The maximum pool size :param callable time_method: Override the use of :py:meth:`time.time` method for time values. :raises: KeyError
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L484-L507
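Editorial note: a short sketch of creating a pool with overridden defaults, guarding against the KeyError the method raises when the pool id already exists. The pool id and sizing values below are placeholders chosen for illustration, and PoolManager is assumed to be importable from queries.pool as the file path above indicates.

from queries.pool import PoolManager

POOL_ID = 'analytics-db'  # hypothetical pool id

try:
    # Keep at most 25 connections and reap the pool after 60 idle seconds
    PoolManager.create(POOL_ID, idle_ttl=60, max_size=25)
except KeyError:
    # The pool was already created elsewhere in this process
    pass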
gmr/queries
queries/pool.py
PoolManager.free
def free(cls, pid, connection):
        """Free a connection that was locked by a session

        :param str pid: The pool ID
        :param connection: The connection to remove
        :type connection: psycopg2.extensions.connection

        """
        with cls._lock:
            LOGGER.debug('Freeing %s from pool %s', id(connection), pid)
            cls._ensure_pool_exists(pid)
            cls._pools[pid].free(connection)
python
def free(cls, pid, connection):
        """Free a connection that was locked by a session

        :param str pid: The pool ID
        :param connection: The connection to remove
        :type connection: psycopg2.extensions.connection

        """
        with cls._lock:
            LOGGER.debug('Freeing %s from pool %s', id(connection), pid)
            cls._ensure_pool_exists(pid)
            cls._pools[pid].free(connection)
Free a connection that was locked by a session :param str pid: The pool ID :param connection: The connection to remove :type connection: psycopg2.extensions.connection
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L510-L521
gmr/queries
queries/pool.py
PoolManager.get
def get(cls, pid, session):
        """Get an idle, unused connection from the pool. Once a connection has
        been retrieved, it will be marked as in-use until it is freed.

        :param str pid: The pool ID
        :param queries.Session session: The session to assign to the connection
        :rtype: psycopg2.extensions.connection

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            return cls._pools[pid].get(session)
python
def get(cls, pid, session):
        """Get an idle, unused connection from the pool. Once a connection has
        been retrieved, it will be marked as in-use until it is freed.

        :param str pid: The pool ID
        :param queries.Session session: The session to assign to the connection
        :rtype: psycopg2.extensions.connection

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            return cls._pools[pid].get(session)
Get an idle, unused connection from the pool. Once a connection has been retrieved, it will be marked as in-use until it is freed. :param str pid: The pool ID :param queries.Session session: The session to assign to the connection :rtype: psycopg2.extensions.connection
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L524-L535
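Editorial note: taken together with add and free above, get defines the checkout lifecycle of a pooled connection. The sketch below is illustrative only: the pool id and DSN are placeholders, and a plain stand-in class is used where a queries.Session instance would normally be passed.

import psycopg2

from queries.pool import PoolManager


class SessionStandIn(object):
    """Placeholder for the queries.Session that would normally hold the lock."""


PID = 'reporting'  # hypothetical pool id
PoolManager.create(PID)

conn = psycopg2.connect('dbname=reporting')  # placeholder DSN
PoolManager.add(PID, conn)                   # connection starts out idle

session = SessionStandIn()
checked_out = PoolManager.get(PID, session)  # now marked in-use for session
try:
    cursor = checked_out.cursor()
    cursor.execute('SELECT 1')
finally:
    PoolManager.free(PID, checked_out)       # returned to the idle set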
gmr/queries
queries/pool.py
PoolManager.get_connection
def get_connection(cls, pid, connection):
        """Return the specified :class:`~queries.pool.Connection` from the
        pool.

        :param str pid: The pool ID
        :param connection: The connection to return for
        :type connection: psycopg2.extensions.connection
        :rtype: queries.pool.Connection

        """
        with cls._lock:
            return cls._pools[pid].connection_handle(connection)
python
def get_connection(cls, pid, connection):
        """Return the specified :class:`~queries.pool.Connection` from the
        pool.

        :param str pid: The pool ID
        :param connection: The connection to return for
        :type connection: psycopg2.extensions.connection
        :rtype: queries.pool.Connection

        """
        with cls._lock:
            return cls._pools[pid].connection_handle(connection)
Return the specified :class:`~queries.pool.Connection` from the pool. :param str pid: The pool ID :param connection: The connection to return for :type connection: psycopg2.extensions.connection :rtype: queries.pool.Connection
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L538-L549
gmr/queries
queries/pool.py
PoolManager.has_connection
def has_connection(cls, pid, connection):
        """Check to see if a pool has the specified connection

        :param str pid: The pool ID
        :param connection: The connection to check for
        :type connection: psycopg2.extensions.connection
        :rtype: bool

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            return connection in cls._pools[pid]
python
def has_connection(cls, pid, connection):
        """Check to see if a pool has the specified connection

        :param str pid: The pool ID
        :param connection: The connection to check for
        :type connection: psycopg2.extensions.connection
        :rtype: bool

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            return connection in cls._pools[pid]
Check to see if a pool has the specified connection :param str pid: The pool ID :param connection: The connection to check for :type connection: psycopg2.extensions.connection :rtype: bool
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L552-L563
gmr/queries
queries/pool.py
PoolManager.has_idle_connection
def has_idle_connection(cls, pid):
        """Check to see if a pool has an idle connection

        :param str pid: The pool ID
        :rtype: bool

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            return bool(cls._pools[pid].idle_connections)
python
def has_idle_connection(cls, pid):
        """Check to see if a pool has an idle connection

        :param str pid: The pool ID
        :rtype: bool

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            return bool(cls._pools[pid].idle_connections)
Check to see if a pool has an idle connection :param str pid: The pool ID :rtype: bool
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L566-L575
gmr/queries
queries/pool.py
PoolManager.is_full
def is_full(cls, pid):
        """Return a bool indicating if the specified pool is full

        :param str pid: The pool id
        :rtype: bool

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            return cls._pools[pid].is_full
python
def is_full(cls, pid):
        """Return a bool indicating if the specified pool is full

        :param str pid: The pool id
        :rtype: bool

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            return cls._pools[pid].is_full
Return a bool indicating if the specified pool is full :param str pid: The pool id :rtype: bool
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L578-L587
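Editorial note: the three predicates above (has_connection, has_idle_connection, is_full) let a caller decide whether to reuse an idle connection or open a new one. A minimal sketch of that decision follows; the checkout helper and the new_connection factory are hypothetical, not part of the library.

from queries.pool import PoolManager


def checkout(pid, session, new_connection):
    """Prefer an idle pooled connection; otherwise open one if there is room.

    new_connection is a caller-supplied factory (hypothetical), e.g. a
    functools.partial around psycopg2.connect.
    """
    if PoolManager.has_idle_connection(pid):
        return PoolManager.get(pid, session)
    if not PoolManager.is_full(pid):
        conn = new_connection()
        PoolManager.add(pid, conn)
        return PoolManager.get(pid, session)
    raise RuntimeError('Pool %s is full and has no idle connections' % pid)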
gmr/queries
queries/pool.py
PoolManager.lock
def lock(cls, pid, connection, session):
        """Explicitly lock the specified connection in the pool

        :param str pid: The pool id
        :type connection: psycopg2.extensions.connection
        :param connection: The connection to add to the pool
        :param queries.Session session: The session to hold the lock

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            cls._pools[pid].lock(connection, session)
python
def lock(cls, pid, connection, session):
        """Explicitly lock the specified connection in the pool

        :param str pid: The pool id
        :type connection: psycopg2.extensions.connection
        :param connection: The connection to add to the pool
        :param queries.Session session: The session to hold the lock

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            cls._pools[pid].lock(connection, session)
Explicitly lock the specified connection in the pool :param str pid: The pool id :type connection: psycopg2.extensions.connection :param connection: The connection to add to the pool :param queries.Session session: The session to hold the lock
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L590-L601
gmr/queries
queries/pool.py
PoolManager.remove
def remove(cls, pid):
        """Remove a pool, closing all connections

        :param str pid: The pool ID

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            cls._pools[pid].close()
            del cls._pools[pid]
python
def remove(cls, pid):
        """Remove a pool, closing all connections

        :param str pid: The pool ID

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            cls._pools[pid].close()
            del cls._pools[pid]
Remove a pool, closing all connections :param str pid: The pool ID
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L604-L613
gmr/queries
queries/pool.py
PoolManager.remove_connection
def remove_connection(cls, pid, connection):
        """Remove a connection from the pool, closing it if is open.

        :param str pid: The pool ID
        :param connection: The connection to remove
        :type connection: psycopg2.extensions.connection
        :raises: ConnectionNotFoundError

        """
        cls._ensure_pool_exists(pid)
        cls._pools[pid].remove(connection)
python
def remove_connection(cls, pid, connection):
        """Remove a connection from the pool, closing it if is open.

        :param str pid: The pool ID
        :param connection: The connection to remove
        :type connection: psycopg2.extensions.connection
        :raises: ConnectionNotFoundError

        """
        cls._ensure_pool_exists(pid)
        cls._pools[pid].remove(connection)
Remove a connection from the pool, closing it if is open. :param str pid: The pool ID :param connection: The connection to remove :type connection: psycopg2.extensions.connection :raises: ConnectionNotFoundError
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L616-L626
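Editorial note: remove drops a whole pool while remove_connection evicts a single connection, which is useful when a socket has died underneath the pool. The sketch below shows one way a caller might do that; the evict_if_dead helper, the liveness probe, and the choice of psycopg2.OperationalError are assumptions for illustration, not library behaviour.

import psycopg2

from queries.pool import ConnectionNotFoundError, PoolManager


def evict_if_dead(pid, conn):
    """Drop a connection from the pool if it no longer responds."""
    try:
        cursor = conn.cursor()
        cursor.execute('SELECT 1')
    except psycopg2.OperationalError:
        try:
            PoolManager.remove_connection(pid, conn)
        except ConnectionNotFoundError:
            pass  # already evicted by another caller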
gmr/queries
queries/pool.py
PoolManager.set_idle_ttl
def set_idle_ttl(cls, pid, ttl):
        """Set the idle TTL for a pool, after which it will be destroyed.

        :param str pid: The pool id
        :param int ttl: The TTL for an idle pool

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            cls._pools[pid].set_idle_ttl(ttl)
python
def set_idle_ttl(cls, pid, ttl):
        """Set the idle TTL for a pool, after which it will be destroyed.

        :param str pid: The pool id
        :param int ttl: The TTL for an idle pool

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            cls._pools[pid].set_idle_ttl(ttl)
Set the idle TTL for a pool, after which it will be destroyed. :param str pid: The pool id :param int ttl: The TTL for an idle pool
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L629-L638
gmr/queries
queries/pool.py
PoolManager.set_max_size
def set_max_size(cls, pid, size):
        """Set the maximum number of connections for the specified pool

        :param str pid: The pool to set the size for
        :param int size: The maximum number of connections

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            cls._pools[pid].set_max_size(size)
python
def set_max_size(cls, pid, size):
        """Set the maximum number of connections for the specified pool

        :param str pid: The pool to set the size for
        :param int size: The maximum number of connections

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            cls._pools[pid].set_max_size(size)
Set the maximum number of connections for the specified pool :param str pid: The pool to set the size for :param int size: The maximum number of connections
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L641-L650
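Editorial note: set_idle_ttl and set_max_size allow an existing pool to be retuned at runtime without recreating it. A brief sketch, with a placeholder pool id and arbitrary values; both calls go through _ensure_pool_exists, so an unknown pool id raises KeyError.

from queries.pool import PoolManager

PID = 'reporting'  # hypothetical pool id

# Shrink the idle lifetime and widen the connection ceiling at runtime
PoolManager.set_idle_ttl(PID, 30)
PoolManager.set_max_size(PID, 50)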
gmr/queries
queries/pool.py
PoolManager.shutdown
def shutdown(cls):
        """Close all connections on in all pools"""
        for pid in list(cls._pools.keys()):
            cls._pools[pid].shutdown()
        LOGGER.info('Shutdown complete, all pooled connections closed')
python
def shutdown(cls):
        """Close all connections on in all pools"""
        for pid in list(cls._pools.keys()):
            cls._pools[pid].shutdown()
        LOGGER.info('Shutdown complete, all pooled connections closed')
Close all connections on in all pools
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L653-L657
gmr/queries
queries/pool.py
PoolManager.size
def size(cls, pid):
        """Return the number of connections in the pool

        :param str pid: The pool id
        :rtype int

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            return len(cls._pools[pid])
python
def size(cls, pid):
        """Return the number of connections in the pool

        :param str pid: The pool id
        :rtype int

        """
        with cls._lock:
            cls._ensure_pool_exists(pid)
            return len(cls._pools[pid])
Return the number of connections in the pool :param str pid: The pool id :rtype int
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L660-L669
gmr/queries
queries/pool.py
PoolManager.report
def report(cls):
        """Return the state of the all of the registered pools.

        :rtype: dict

        """
        return {
            'timestamp': datetime.datetime.utcnow().isoformat(),
            'process': os.getpid(),
            'pools': dict([(i, p.report()) for i, p in cls._pools.items()])
        }
python
def report(cls):
        """Return the state of the all of the registered pools.

        :rtype: dict

        """
        return {
            'timestamp': datetime.datetime.utcnow().isoformat(),
            'process': os.getpid(),
            'pools': dict([(i, p.report()) for i, p in cls._pools.items()])
        }
Return the state of the all of the registered pools. :rtype: dict
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L672-L682
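Editorial note: because PoolManager.report returns plain dicts of strings and numbers, it serializes directly to JSON, which makes it convenient for a health or metrics endpoint. The helper below is a hypothetical wrapper, not part of the library.

import json

from queries.pool import PoolManager


def pool_health():
    """Return the per-process pool report as a JSON string (e.g. for a /health route)."""
    return json.dumps(PoolManager.report(), indent=2, sort_keys=True)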
gmr/queries
queries/pool.py
PoolManager._maybe_remove_pool
def _maybe_remove_pool(cls, pid):
        """If the pool has no open connections, remove it

        :param str pid: The pool id to clean

        """
        if not len(cls._pools[pid]):
            del cls._pools[pid]
python
def _maybe_remove_pool(cls, pid):
        """If the pool has no open connections, remove it

        :param str pid: The pool id to clean

        """
        if not len(cls._pools[pid]):
            del cls._pools[pid]
If the pool has no open connections, remove it :param str pid: The pool id to clean
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L697-L704
gmr/queries
queries/session.py
Session.callproc
def callproc(self, name, args=None):
        """Call a stored procedure on the server, returning the results in a
        :py:class:`queries.Results` instance.

        :param str name: The procedure name
        :param list args: The list of arguments to pass in
        :rtype: queries.Results
        :raises: queries.DataError
        :raises: queries.DatabaseError
        :raises: queries.IntegrityError
        :raises: queries.InternalError
        :raises: queries.InterfaceError
        :raises: queries.NotSupportedError
        :raises: queries.OperationalError
        :raises: queries.ProgrammingError

        """
        try:
            self._cursor.callproc(name, args)
        except psycopg2.Error as err:
            self._incr_exceptions()
            raise err
        finally:
            self._incr_executions()
        return results.Results(self._cursor)
python
def callproc(self, name, args=None):
        """Call a stored procedure on the server, returning the results in a
        :py:class:`queries.Results` instance.

        :param str name: The procedure name
        :param list args: The list of arguments to pass in
        :rtype: queries.Results
        :raises: queries.DataError
        :raises: queries.DatabaseError
        :raises: queries.IntegrityError
        :raises: queries.InternalError
        :raises: queries.InterfaceError
        :raises: queries.NotSupportedError
        :raises: queries.OperationalError
        :raises: queries.ProgrammingError

        """
        try:
            self._cursor.callproc(name, args)
        except psycopg2.Error as err:
            self._incr_exceptions()
            raise err
        finally:
            self._incr_executions()
        return results.Results(self._cursor)
Call a stored procedure on the server, returning the results in a :py:class:`queries.Results` instance. :param str name: The procedure name :param list args: The list of arguments to pass in :rtype: queries.Results :raises: queries.DataError :raises: queries.DatabaseError :raises: queries.IntegrityError :raises: queries.InternalError :raises: queries.InterfaceError :raises: queries.NotSupportedError :raises: queries.OperationalError :raises: queries.ProgrammingError
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/session.py#L105-L129
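Editorial note: callproc wraps cursor.callproc, so any server-side function can be invoked and iterated like a query result. A minimal sketch, assuming Session is exposed at the package level as the queries/session.py path suggests; the connection URI is a placeholder, and the built-in Postgres version() function is used only because it is always present.

import queries

session = queries.Session('postgresql://postgres@localhost:5432/postgres')

result = session.callproc('version')
for row in result:
    print(row)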
gmr/queries
queries/session.py
Session.close
def close(self):
        """Explicitly close the connection and remove it from the connection
        pool if pooling is enabled. If the connection is already closed

        :raises: psycopg2.InterfaceError

        """
        if not self._conn:
            raise psycopg2.InterfaceError('Connection not open')

        LOGGER.info('Closing connection %r in %s', self._conn, self.pid)
        self._pool_manager.free(self.pid, self._conn)
        self._pool_manager.remove_connection(self.pid, self._conn)

        # Un-assign the connection and cursor
        self._conn, self._cursor = None, None
python
def close(self):
        """Explicitly close the connection and remove it from the connection
        pool if pooling is enabled. If the connection is already closed

        :raises: psycopg2.InterfaceError

        """
        if not self._conn:
            raise psycopg2.InterfaceError('Connection not open')

        LOGGER.info('Closing connection %r in %s', self._conn, self.pid)
        self._pool_manager.free(self.pid, self._conn)
        self._pool_manager.remove_connection(self.pid, self._conn)

        # Un-assign the connection and cursor
        self._conn, self._cursor = None, None
Explicitly close the connection and remove it from the connection pool if pooling is enabled. If the connection is already closed :raises: psycopg2.InterfaceError
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/session.py#L131-L145
gmr/queries
queries/session.py
Session.pid
def pid(self):
        """Return the pool ID used for connection pooling.

        :rtype: str

        """
        return hashlib.md5(':'.join([self.__class__.__name__,
                                     self._uri]).encode('utf-8')).hexdigest()
python
def pid(self):
        """Return the pool ID used for connection pooling.

        :rtype: str

        """
        return hashlib.md5(':'.join([self.__class__.__name__,
                                     self._uri]).encode('utf-8')).hexdigest()
Return the pool ID used for connection pooling. :rtype: str
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/session.py#L184-L191
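Editorial note: the pool id is simply the MD5 of the class name joined to the connection URI, which is why every Session built from the same URI shares one pool. The derivation can be reproduced with the standard library; the URI below is a placeholder, and this assumes the URI is stored unmodified on the instance as _uri.

import hashlib

uri = 'postgresql://postgres@localhost:5432/postgres'  # illustrative URI
pid = hashlib.md5(':'.join(['Session', uri]).encode('utf-8')).hexdigest()
print(pid)  # the value Session(uri).pid would return under that assumption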
gmr/queries
queries/session.py
Session.query
def query(self, sql, parameters=None):
        """A generator to issue a query on the server, mogrifying the
        parameters against the sql statement. Results are returned as a
        :py:class:`queries.Results` object which can act as an iterator and
        has multiple ways to access the result data.

        :param str sql: The SQL statement
        :param dict parameters: A dictionary of query parameters
        :rtype: queries.Results
        :raises: queries.DataError
        :raises: queries.DatabaseError
        :raises: queries.IntegrityError
        :raises: queries.InternalError
        :raises: queries.InterfaceError
        :raises: queries.NotSupportedError
        :raises: queries.OperationalError
        :raises: queries.ProgrammingError

        """
        try:
            self._cursor.execute(sql, parameters)
        except psycopg2.Error as err:
            self._incr_exceptions()
            raise err
        finally:
            self._incr_executions()
        return results.Results(self._cursor)
python
def query(self, sql, parameters=None):
        """A generator to issue a query on the server, mogrifying the
        parameters against the sql statement. Results are returned as a
        :py:class:`queries.Results` object which can act as an iterator and
        has multiple ways to access the result data.

        :param str sql: The SQL statement
        :param dict parameters: A dictionary of query parameters
        :rtype: queries.Results
        :raises: queries.DataError
        :raises: queries.DatabaseError
        :raises: queries.IntegrityError
        :raises: queries.InternalError
        :raises: queries.InterfaceError
        :raises: queries.NotSupportedError
        :raises: queries.OperationalError
        :raises: queries.ProgrammingError

        """
        try:
            self._cursor.execute(sql, parameters)
        except psycopg2.Error as err:
            self._incr_exceptions()
            raise err
        finally:
            self._incr_executions()
        return results.Results(self._cursor)
A generator to issue a query on the server, mogrifying the parameters against the sql statement. Results are returned as a :py:class:`queries.Results` object which can act as an iterator and has multiple ways to access the result data. :param str sql: The SQL statement :param dict parameters: A dictionary of query parameters :rtype: queries.Results :raises: queries.DataError :raises: queries.DatabaseError :raises: queries.IntegrityError :raises: queries.InternalError :raises: queries.InterfaceError :raises: queries.NotSupportedError :raises: queries.OperationalError :raises: queries.ProgrammingError
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/session.py#L193-L219
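Editorial note: query hands the SQL and the parameters dict to cursor.execute, so quoting and escaping are handled by psycopg2. A minimal parameterized example follows; the URI, table, and column names are placeholders, and dict-style row access assumes the library's default dict-like cursor.

import queries

session = queries.Session('postgresql://postgres@localhost:5432/postgres')

result = session.query(
    'SELECT username, email FROM users WHERE active = %(active)s',
    {'active': True})
for row in result:
    print(row['username'], row['email'])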
gmr/queries
queries/session.py
Session.set_encoding
def set_encoding(self, value=DEFAULT_ENCODING):
        """Set the client encoding for the session if the value specified
        is different than the current client encoding.

        :param str value: The encoding value to use

        """
        if self._conn.encoding != value:
            self._conn.set_client_encoding(value)
python
def set_encoding(self, value=DEFAULT_ENCODING):
        """Set the client encoding for the session if the value specified
        is different than the current client encoding.

        :param str value: The encoding value to use

        """
        if self._conn.encoding != value:
            self._conn.set_client_encoding(value)
Set the client encoding for the session if the value specified is different than the current client encoding. :param str value: The encoding value to use
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/session.py#L221-L229
gmr/queries
queries/session.py
Session._cleanup
def _cleanup(self):
        """Remove the connection from the stack, closing out the cursor"""
        if self._cursor:
            LOGGER.debug('Closing the cursor on %s', self.pid)
            self._cursor.close()
            self._cursor = None

        if self._conn:
            LOGGER.debug('Freeing %s in the pool', self.pid)
            try:
                pool.PoolManager.instance().free(self.pid, self._conn)
            except pool.ConnectionNotFoundError:
                pass
            self._conn = None
python
def _cleanup(self):
        """Remove the connection from the stack, closing out the cursor"""
        if self._cursor:
            LOGGER.debug('Closing the cursor on %s', self.pid)
            self._cursor.close()
            self._cursor = None

        if self._conn:
            LOGGER.debug('Freeing %s in the pool', self.pid)
            try:
                pool.PoolManager.instance().free(self.pid, self._conn)
            except pool.ConnectionNotFoundError:
                pass
            self._conn = None
Remove the connection from the stack, closing out the cursor
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/session.py#L258-L271
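Editorial note: _cleanup frees the pooled connection rather than closing it, which is what makes short-lived Session usage cheap. The sketch below assumes Session implements the context-manager protocol and invokes this cleanup path on exit (an assumption this excerpt does not show); the URI and query are placeholders.

import queries

with queries.Session('postgresql://postgres@localhost:5432/postgres') as session:
    for row in session.query('SELECT 1 AS one'):
        print(row['one'])
# On exit the connection is returned to the pool via the cleanup path above,
# not closed outright.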