from flask import request, render_template, jsonify
from flask_classy import FlaskView  # the flask.ext.* import namespace is deprecated
from app import app
import os
import util
from analyze import *
# Loads the model upfront from the pickle file
model = loadTFIDFModel(util.currentTerm, util.currentYear, True)
@app.route('/findSimilarCoursestoTerm', methods=['POST'])
def searchCourses():
"""
"""
text, similarCourses = searchSimilar()
jsonCourses = []
for simCourse in similarCourses:
        course = simCourse[1]  # the matched course object; simCourse[0] is its score
jsonCourses.append(dict(
course=course.UID.replace('.', ' '),
title=course.title,
location=course.location,
time=course.time,
instructor=course.instructor,
description=course.description,
score="{0:.2f}".format(simCourse[0])
))
return jsonify(result=jsonCourses)
def searchSimilar():
"""
"""
text = request.form.get('text')
count = int(request.form.get('count'))
similarCourses = findSimilarity(model, text, count)
return text, similarCourses
class BaseView(FlaskView):
"""
Basic views, such as the home and about page.
"""
route_base = '/'
def index(self):
return render_template('home.html')
BaseView.register(app)
| {
"content_hash": "967c8637cef21bea1163e35b440da8a4",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 64,
"avg_line_length": 23.490909090909092,
"alnum_prop": 0.6439628482972136,
"repo_name": "kqdtran/bearRec",
"id": "021cb93513734ef81613dc12455dcd2f2687eac9",
"size": "1292",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "project/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2303"
},
{
"name": "JavaScript",
"bytes": "4620"
},
{
"name": "Python",
"bytes": "13974"
}
],
"symlink_target": ""
} |
""" Partially instantiate a variable font.
The module exports an `instantiateVariableFont` function and CLI that allow you to
create full instances (i.e. static fonts) from variable fonts, as well as "partial"
variable fonts that only contain a subset of the original variation space.
For example, if you wish to pin the width axis to a given location while also
restricting the weight axis to the 400..700 range, you can do::
$ fonttools varLib.instancer ./NotoSans-VF.ttf wdth=85 wght=400:700
See `fonttools varLib.instancer --help` for more info on the CLI options.
The module's entry point is the `instantiateVariableFont` function, which takes
a TTFont object and a dict specifying either axis coordinates or (min, max) ranges,
and returns a new TTFont representing either a partial VF, or full instance if all
the VF axes were given an explicit coordinate.
E.g. here's how to pin the wght axis at a given location in a wght+wdth variable
font, keeping only the deltas associated with the wdth axis::
| >>> from fontTools import ttLib
| >>> from fontTools.varLib import instancer
| >>> varfont = ttLib.TTFont("path/to/MyVariableFont.ttf")
| >>> [a.axisTag for a in varfont["fvar"].axes] # the varfont's current axes
| ['wght', 'wdth']
| >>> partial = instancer.instantiateVariableFont(varfont, {"wght": 300})
| >>> [a.axisTag for a in partial["fvar"].axes] # axes left after pinning 'wght'
| ['wdth']
If the input location specifies all the axes, the resulting instance is no longer
'variable' (same as using fonttools varLib.mutator):
| >>> instance = instancer.instantiateVariableFont(
| ... varfont, {"wght": 700, "wdth": 67.5}
| ... )
| >>> "fvar" not in instance
| True
If one just wants to drop an axis at the default location, without knowing in
advance what the default value for that axis is, one can pass a `None` value:
| >>> instance = instancer.instantiateVariableFont(varfont, {"wght": None})
| >>> len(varfont["fvar"].axes)
| 1
From the console script, this is equivalent to passing `wght=drop` as input.
This module is similar to fontTools.varLib.mutator, which it's intended to supersede.
Note that, unlike varLib.mutator, when an axis is not mentioned in the input
location, the varLib.instancer will keep the axis and the corresponding deltas,
whereas mutator implicitly drops the axis at its default coordinate.
The module supports all the following "levels" of instancing, which can of
course be combined:
L1
dropping one or more axes while leaving the default tables unmodified;
| >>> font = instancer.instantiateVariableFont(varfont, {"wght": None})
L2
dropping one or more axes while pinning them at non-default locations;
| >>> font = instancer.instantiateVariableFont(varfont, {"wght": 700})
L3
restricting the range of variation of one or more axes, by setting either
a new minimum or maximum, potentially -- though not necessarily -- dropping
entire regions of variations that fall completely outside this new range.
| >>> font = instancer.instantiateVariableFont(varfont, {"wght": (100, 300)})
L4
    moving the default location of an axis, by specifying (min, default, max) values:
| >>> font = instancer.instantiateVariableFont(varfont, {"wght": (100, 300, 700)})
Currently only TrueType-flavored variable fonts (i.e. containing 'glyf' table)
are supported, but support for CFF2 variable fonts will be added soon.
The discussion and implementation of these features are tracked at
https://github.com/fonttools/fonttools/issues/1537
"""
from fontTools.misc.fixedTools import (
floatToFixedToFloat,
strToFixedToFloat,
otRound,
)
from fontTools.varLib.models import supportScalar, normalizeValue, piecewiseLinearMap
from fontTools.ttLib import TTFont
from fontTools.ttLib.tables.TupleVariation import TupleVariation
from fontTools.ttLib.tables import _g_l_y_f
from fontTools import varLib
# we import the `subset` module because we use the `prune_lookups` method on the GSUB
# table class, and that method is only defined dynamically upon importing `subset`
from fontTools import subset # noqa: F401
from fontTools.varLib import builder
from fontTools.varLib.mvar import MVAR_ENTRIES
from fontTools.varLib.merger import MutatorMerger
from fontTools.varLib.instancer import names
from .featureVars import instantiateFeatureVariations
from fontTools.misc.cliTools import makeOutputFileName
from fontTools.varLib.instancer import solver
import collections
import dataclasses
from copy import deepcopy
from enum import IntEnum
import logging
import os
import re
from typing import Dict, Iterable, Mapping, Optional, Sequence, Tuple, Union
import warnings
log = logging.getLogger("fontTools.varLib.instancer")
def AxisRange(minimum, maximum):
warnings.warn(
"AxisRange is deprecated; use AxisTriple instead",
DeprecationWarning,
stacklevel=2,
)
return AxisTriple(minimum, None, maximum)
def NormalizedAxisRange(minimum, maximum):
warnings.warn(
"NormalizedAxisRange is deprecated; use AxisTriple instead",
DeprecationWarning,
stacklevel=2,
)
return NormalizedAxisTriple(minimum, None, maximum)
@dataclasses.dataclass(frozen=True, order=True, repr=False)
class AxisTriple(Sequence):
"""A triple of (min, default, max) axis values.
The default value can be None, in which case the populateDefault() method can be
used to fill in the missing default value based on the fvar axis default.
"""
minimum: float
default: Optional[float] # if None, filled with fvar default by populateDefault
maximum: float
def __post_init__(self):
if self.default is None and self.minimum == self.maximum:
object.__setattr__(self, "default", self.minimum)
if not (
(self.minimum <= self.default <= self.maximum)
if self.default is not None
else (self.minimum <= self.maximum)
):
raise ValueError(
f"{type(self).__name__} minimum ({self.minimum}) must be <= default "
f"({self.default}) which must be <= maximum ({self.maximum})"
)
def __getitem__(self, i):
fields = dataclasses.fields(self)
return getattr(self, fields[i].name)
def __len__(self):
return len(dataclasses.fields(self))
def _replace(self, **kwargs):
return dataclasses.replace(self, **kwargs)
def __repr__(self):
return (
f"({', '.join(format(v, 'g') if v is not None else 'None' for v in self)})"
)
@classmethod
def expand(
cls,
v: Union[
"AxisTriple",
float, # pin axis at single value, same as min==default==max
Tuple[float, float], # (min, max), restrict axis and keep default
Tuple[float, float, float], # (min, default, max)
],
) -> "AxisTriple":
"""Convert a single value or a tuple into an AxisTriple.
If the input is a single value, it is interpreted as a pin at that value.
If the input is a tuple, it is interpreted as (min, max) or (min, default, max).
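        For illustration, a pin and a (min, max) restriction expand as follows
        (the values below are arbitrary examples)::

            | >>> AxisTriple.expand(400)
            | (400, 400, 400)
            | >>> AxisTriple.expand((100, 900))
            | (100, None, 900)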
"""
if isinstance(v, cls):
return v
if isinstance(v, (int, float)):
return cls(v, v, v)
try:
n = len(v)
except TypeError as e:
raise ValueError(
f"expected float, 2- or 3-tuple of floats; got {type(v)}: {v!r}"
) from e
default = None
if n == 2:
minimum, maximum = v
elif n == 3:
minimum, default, maximum = v
else:
raise ValueError(f"expected sequence of 2 or 3; got {n}: {v!r}")
return cls(minimum, default, maximum)
def populateDefault(self, fvarAxisDefault) -> "AxisTriple":
"""Return a new AxisTriple with the default value filled in.
Set default to fvar axis default if the latter is within the min/max range,
otherwise set default to the min or max value, whichever is closer to the
fvar axis default.
If the default value is already set, return self.
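        For illustration, restricting an axis to 100..400 when the fvar default is
        700 (arbitrary example values) clamps the default to the nearest bound::

            | >>> AxisTriple(100, None, 400).populateDefault(700)
            | (100, 400, 400)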
"""
if self.default is not None:
return self
default = max(self.minimum, min(self.maximum, fvarAxisDefault))
return dataclasses.replace(self, default=default)
@dataclasses.dataclass(frozen=True, order=True, repr=False)
class NormalizedAxisTriple(AxisTriple):
"""A triple of (min, default, max) normalized axis values."""
minimum: float
default: float
maximum: float
def __post_init__(self):
if self.default is None:
object.__setattr__(self, "default", max(self.minimum, min(self.maximum, 0)))
if not (-1.0 <= self.minimum <= self.default <= self.maximum <= 1.0):
raise ValueError(
"Normalized axis values not in -1..+1 range; got "
f"minimum={self.minimum:g}, default={self.default:g}, maximum={self.maximum:g})"
)
class _BaseAxisLimits(Mapping[str, AxisTriple]):
def __getitem__(self, key: str) -> AxisTriple:
return self._data[key]
def __iter__(self) -> Iterable[str]:
return iter(self._data)
def __len__(self) -> int:
return len(self._data)
def __repr__(self) -> str:
return f"{type(self).__name__}({self._data!r})"
def __str__(self) -> str:
return str(self._data)
def defaultLocation(self) -> Dict[str, float]:
"""Return a dict of default axis values."""
return {k: v.default for k, v in self.items()}
def pinnedLocation(self) -> Dict[str, float]:
"""Return a location dict with only the pinned axes."""
return {k: v.default for k, v in self.items() if v.minimum == v.maximum}
class AxisLimits(_BaseAxisLimits):
"""Maps axis tags (str) to AxisTriple values."""
def __init__(self, *args, **kwargs):
self.have_defaults = True
self._data = data = {}
for k, v in dict(*args, **kwargs).items():
if v is None:
# will be filled in by populateDefaults
self.have_defaults = False
data[k] = v
else:
try:
triple = AxisTriple.expand(v)
except ValueError as e:
raise ValueError(f"Invalid axis limits for {k!r}: {v!r}") from e
if triple.default is None:
# also filled in by populateDefaults
self.have_defaults = False
data[k] = triple
def populateDefaults(self, varfont) -> "AxisLimits":
"""Return a new AxisLimits with defaults filled in from fvar table.
If all axis limits already have defaults, return self.
"""
if self.have_defaults:
return self
fvar = varfont["fvar"]
defaultValues = {a.axisTag: a.defaultValue for a in fvar.axes}
newLimits = {}
for axisTag, triple in self.items():
default = defaultValues[axisTag]
if triple is None:
newLimits[axisTag] = AxisTriple(default, default, default)
else:
newLimits[axisTag] = triple.populateDefault(default)
return type(self)(newLimits)
def normalize(self, varfont, usingAvar=True) -> "NormalizedAxisLimits":
"""Return a new NormalizedAxisLimits with normalized -1..0..+1 values.
If usingAvar is True, the avar table is used to warp the default normalization.
"""
fvar = varfont["fvar"]
badLimits = set(self.keys()).difference(a.axisTag for a in fvar.axes)
if badLimits:
raise ValueError("Cannot limit: {} not present in fvar".format(badLimits))
axes = {
a.axisTag: (a.minValue, a.defaultValue, a.maxValue)
for a in fvar.axes
if a.axisTag in self
}
avarSegments = {}
if usingAvar and "avar" in varfont:
avarSegments = varfont["avar"].segments
normalizedLimits = {}
for axis_tag, triple in axes.items():
if self[axis_tag] is None:
normalizedLimits[axis_tag] = NormalizedAxisTriple(0, 0, 0)
continue
minV, defaultV, maxV = self[axis_tag]
if defaultV is None:
defaultV = triple[1]
avarMapping = avarSegments.get(axis_tag, None)
normalizedLimits[axis_tag] = NormalizedAxisTriple(
*(normalize(v, triple, avarMapping) for v in (minV, defaultV, maxV))
)
return NormalizedAxisLimits(normalizedLimits)
class NormalizedAxisLimits(_BaseAxisLimits):
"""Maps axis tags (str) to NormalizedAxisTriple values."""
def __init__(self, *args, **kwargs):
self._data = data = {}
for k, v in dict(*args, **kwargs).items():
try:
triple = NormalizedAxisTriple.expand(v)
except ValueError as e:
raise ValueError(f"Invalid axis limits for {k!r}: {v!r}") from e
data[k] = triple
class OverlapMode(IntEnum):
KEEP_AND_DONT_SET_FLAGS = 0
KEEP_AND_SET_FLAGS = 1
REMOVE = 2
REMOVE_AND_IGNORE_ERRORS = 3
def instantiateTupleVariationStore(
variations, axisLimits, origCoords=None, endPts=None
):
"""Instantiate TupleVariation list at the given location, or limit axes' min/max.
The 'variations' list of TupleVariation objects is modified in-place.
The 'axisLimits' (dict) maps axis tags (str) to NormalizedAxisTriple namedtuples
specifying (minimum, default, maximum) in the -1,0,+1 normalized space. Pinned axes
have minimum == default == maximum.
A 'full' instance (i.e. static font) is produced when all the axes are pinned to
single coordinates; a 'partial' instance (i.e. a less variable font) is produced
when some of the axes are omitted, or restricted with a new range.
Tuples that do not participate are kept as they are. Those that have 0 influence
at the given location are removed from the variation store.
Those that are fully instantiated (i.e. all their axes are being pinned) are also
    removed from the variation store, their scaled deltas accumulated and returned, so
that they can be added by the caller to the default instance's coordinates.
Tuples that are only partially instantiated (i.e. not all the axes that they
participate in are being pinned) are kept in the store, and their deltas multiplied
by the scalar support of the axes to be pinned at the desired location.
Args:
variations: List[TupleVariation] from either 'gvar' or 'cvar'.
axisLimits: NormalizedAxisLimits: map from axis tags to (min, default, max)
normalized coordinates for the full or partial instance.
origCoords: GlyphCoordinates: default instance's coordinates for computing 'gvar'
inferred points (cf. table__g_l_y_f._getCoordinatesAndControls).
endPts: List[int]: indices of contour end points, for inferring 'gvar' deltas.
Returns:
List[float]: the overall delta adjustment after applicable deltas were summed.
"""
newVariations = changeTupleVariationsAxisLimits(variations, axisLimits)
mergedVariations = collections.OrderedDict()
for var in newVariations:
# compute inferred deltas only for gvar ('origCoords' is None for cvar)
if origCoords is not None:
var.calcInferredDeltas(origCoords, endPts)
# merge TupleVariations with overlapping "tents"
axes = frozenset(var.axes.items())
if axes in mergedVariations:
mergedVariations[axes] += var
else:
mergedVariations[axes] = var
# drop TupleVariation if all axes have been pinned (var.axes.items() is empty);
# its deltas will be added to the default instance's coordinates
defaultVar = mergedVariations.pop(frozenset(), None)
for var in mergedVariations.values():
var.roundDeltas()
variations[:] = list(mergedVariations.values())
return defaultVar.coordinates if defaultVar is not None else []
def changeTupleVariationsAxisLimits(variations, axisLimits):
for axisTag, axisLimit in sorted(axisLimits.items()):
newVariations = []
for var in variations:
newVariations.extend(changeTupleVariationAxisLimit(var, axisTag, axisLimit))
variations = newVariations
return variations
def changeTupleVariationAxisLimit(var, axisTag, axisLimit):
assert isinstance(axisLimit, NormalizedAxisTriple)
# Skip when current axis is missing (i.e. doesn't participate),
lower, peak, upper = var.axes.get(axisTag, (-1, 0, 1))
if peak == 0:
return [var]
# Drop if the var 'tent' isn't well-formed
if not (lower <= peak <= upper) or (lower < 0 and upper > 0):
return []
if axisTag not in var.axes:
return [var]
tent = var.axes[axisTag]
solutions = solver.rebaseTent(tent, axisLimit)
out = []
for scalar, tent in solutions:
newVar = (
TupleVariation(var.axes, var.coordinates) if len(solutions) > 1 else var
)
if tent is None:
newVar.axes.pop(axisTag)
else:
assert tent[1] != 0, tent
newVar.axes[axisTag] = tent
newVar *= scalar
out.append(newVar)
return out
def _instantiateGvarGlyph(
glyphname, glyf, gvar, hMetrics, vMetrics, axisLimits, optimize=True
):
coordinates, ctrl = glyf._getCoordinatesAndControls(glyphname, hMetrics, vMetrics)
endPts = ctrl.endPts
# Not every glyph may have variations
tupleVarStore = gvar.variations.get(glyphname)
if tupleVarStore:
defaultDeltas = instantiateTupleVariationStore(
tupleVarStore, axisLimits, coordinates, endPts
)
if defaultDeltas:
coordinates += _g_l_y_f.GlyphCoordinates(defaultDeltas)
# _setCoordinates also sets the hmtx/vmtx advance widths and sidebearings from
# the four phantom points and glyph bounding boxes.
# We call it unconditionally even if a glyph has no variations or no deltas are
# applied at this location, in case the glyph's xMin and in turn its sidebearing
# have changed. E.g. a composite glyph has no deltas for the component's (x, y)
# offset nor for the 4 phantom points (e.g. it's monospaced). Thus its entry in
# gvar table is empty; however, the composite's base glyph may have deltas
# applied, hence the composite's bbox and left/top sidebearings may need updating
# in the instanced font.
glyf._setCoordinates(glyphname, coordinates, hMetrics, vMetrics)
if not tupleVarStore:
if glyphname in gvar.variations:
del gvar.variations[glyphname]
return
if optimize:
isComposite = glyf[glyphname].isComposite()
for var in tupleVarStore:
var.optimize(coordinates, endPts, isComposite)
def instantiateGvarGlyph(varfont, glyphname, axisLimits, optimize=True):
"""Remove?
https://github.com/fonttools/fonttools/pull/2266"""
gvar = varfont["gvar"]
glyf = varfont["glyf"]
hMetrics = varfont["hmtx"].metrics
vMetrics = getattr(varfont.get("vmtx"), "metrics", None)
_instantiateGvarGlyph(
glyphname, glyf, gvar, hMetrics, vMetrics, axisLimits, optimize=optimize
)
def instantiateGvar(varfont, axisLimits, optimize=True):
log.info("Instantiating glyf/gvar tables")
gvar = varfont["gvar"]
glyf = varfont["glyf"]
hMetrics = varfont["hmtx"].metrics
vMetrics = getattr(varfont.get("vmtx"), "metrics", None)
# Get list of glyph names sorted by component depth.
# If a composite glyph is processed before its base glyph, the bounds may
# be calculated incorrectly because deltas haven't been applied to the
# base glyph yet.
glyphnames = sorted(
glyf.glyphOrder,
key=lambda name: (
glyf[name].getCompositeMaxpValues(glyf).maxComponentDepth
if glyf[name].isComposite()
else 0,
name,
),
)
for glyphname in glyphnames:
_instantiateGvarGlyph(
glyphname, glyf, gvar, hMetrics, vMetrics, axisLimits, optimize=optimize
)
if not gvar.variations:
del varfont["gvar"]
def setCvarDeltas(cvt, deltas):
for i, delta in enumerate(deltas):
if delta:
cvt[i] += otRound(delta)
def instantiateCvar(varfont, axisLimits):
log.info("Instantiating cvt/cvar tables")
cvar = varfont["cvar"]
defaultDeltas = instantiateTupleVariationStore(cvar.variations, axisLimits)
if defaultDeltas:
setCvarDeltas(varfont["cvt "], defaultDeltas)
if not cvar.variations:
del varfont["cvar"]
def setMvarDeltas(varfont, deltas):
mvar = varfont["MVAR"].table
records = mvar.ValueRecord
for rec in records:
mvarTag = rec.ValueTag
if mvarTag not in MVAR_ENTRIES:
continue
tableTag, itemName = MVAR_ENTRIES[mvarTag]
delta = deltas[rec.VarIdx]
if delta != 0:
setattr(
varfont[tableTag],
itemName,
getattr(varfont[tableTag], itemName) + otRound(delta),
)
def instantiateMVAR(varfont, axisLimits):
log.info("Instantiating MVAR table")
mvar = varfont["MVAR"].table
fvarAxes = varfont["fvar"].axes
varStore = mvar.VarStore
defaultDeltas = instantiateItemVariationStore(varStore, fvarAxes, axisLimits)
setMvarDeltas(varfont, defaultDeltas)
if varStore.VarRegionList.Region:
varIndexMapping = varStore.optimize()
for rec in mvar.ValueRecord:
rec.VarIdx = varIndexMapping[rec.VarIdx]
else:
del varfont["MVAR"]
def _remapVarIdxMap(table, attrName, varIndexMapping, glyphOrder):
oldMapping = getattr(table, attrName).mapping
newMapping = [varIndexMapping[oldMapping[glyphName]] for glyphName in glyphOrder]
setattr(table, attrName, builder.buildVarIdxMap(newMapping, glyphOrder))
# TODO(anthrotype) Add support for HVAR/VVAR in CFF2
def _instantiateVHVAR(varfont, axisLimits, tableFields):
location = axisLimits.pinnedLocation()
tableTag = tableFields.tableTag
fvarAxes = varfont["fvar"].axes
# Deltas from gvar table have already been applied to the hmtx/vmtx. For full
# instances (i.e. all axes pinned), we can simply drop HVAR/VVAR and return
if set(location).issuperset(axis.axisTag for axis in fvarAxes):
log.info("Dropping %s table", tableTag)
del varfont[tableTag]
return
log.info("Instantiating %s table", tableTag)
vhvar = varfont[tableTag].table
varStore = vhvar.VarStore
# since deltas were already applied, the return value here is ignored
instantiateItemVariationStore(varStore, fvarAxes, axisLimits)
if varStore.VarRegionList.Region:
# Only re-optimize VarStore if the HVAR/VVAR already uses indirect AdvWidthMap
# or AdvHeightMap. If a direct, implicit glyphID->VariationIndex mapping is
# used for advances, skip re-optimizing and maintain original VariationIndex.
if getattr(vhvar, tableFields.advMapping):
varIndexMapping = varStore.optimize(use_NO_VARIATION_INDEX=False)
glyphOrder = varfont.getGlyphOrder()
_remapVarIdxMap(vhvar, tableFields.advMapping, varIndexMapping, glyphOrder)
if getattr(vhvar, tableFields.sb1): # left or top sidebearings
_remapVarIdxMap(vhvar, tableFields.sb1, varIndexMapping, glyphOrder)
if getattr(vhvar, tableFields.sb2): # right or bottom sidebearings
_remapVarIdxMap(vhvar, tableFields.sb2, varIndexMapping, glyphOrder)
if tableTag == "VVAR" and getattr(vhvar, tableFields.vOrigMapping):
_remapVarIdxMap(
vhvar, tableFields.vOrigMapping, varIndexMapping, glyphOrder
)
def instantiateHVAR(varfont, axisLimits):
return _instantiateVHVAR(varfont, axisLimits, varLib.HVAR_FIELDS)
def instantiateVVAR(varfont, axisLimits):
return _instantiateVHVAR(varfont, axisLimits, varLib.VVAR_FIELDS)
class _TupleVarStoreAdapter(object):
def __init__(self, regions, axisOrder, tupleVarData, itemCounts):
self.regions = regions
self.axisOrder = axisOrder
self.tupleVarData = tupleVarData
self.itemCounts = itemCounts
@classmethod
def fromItemVarStore(cls, itemVarStore, fvarAxes):
axisOrder = [axis.axisTag for axis in fvarAxes]
regions = [
region.get_support(fvarAxes) for region in itemVarStore.VarRegionList.Region
]
tupleVarData = []
itemCounts = []
for varData in itemVarStore.VarData:
variations = []
varDataRegions = (regions[i] for i in varData.VarRegionIndex)
for axes, coordinates in zip(varDataRegions, zip(*varData.Item)):
variations.append(TupleVariation(axes, list(coordinates)))
tupleVarData.append(variations)
itemCounts.append(varData.ItemCount)
return cls(regions, axisOrder, tupleVarData, itemCounts)
def rebuildRegions(self):
# Collect the set of all unique region axes from the current TupleVariations.
# We use an OrderedDict to de-duplicate regions while keeping the order.
uniqueRegions = collections.OrderedDict.fromkeys(
(
frozenset(var.axes.items())
for variations in self.tupleVarData
for var in variations
)
)
# Maintain the original order for the regions that pre-existed, appending
# the new regions at the end of the region list.
newRegions = []
for region in self.regions:
regionAxes = frozenset(region.items())
if regionAxes in uniqueRegions:
newRegions.append(region)
del uniqueRegions[regionAxes]
if uniqueRegions:
newRegions.extend(dict(region) for region in uniqueRegions)
self.regions = newRegions
def instantiate(self, axisLimits):
defaultDeltaArray = []
for variations, itemCount in zip(self.tupleVarData, self.itemCounts):
defaultDeltas = instantiateTupleVariationStore(variations, axisLimits)
if not defaultDeltas:
defaultDeltas = [0] * itemCount
defaultDeltaArray.append(defaultDeltas)
# rebuild regions whose axes were dropped or limited
self.rebuildRegions()
pinnedAxes = set(axisLimits.pinnedLocation())
self.axisOrder = [
axisTag for axisTag in self.axisOrder if axisTag not in pinnedAxes
]
return defaultDeltaArray
def asItemVarStore(self):
regionOrder = [frozenset(axes.items()) for axes in self.regions]
varDatas = []
for variations, itemCount in zip(self.tupleVarData, self.itemCounts):
if variations:
assert len(variations[0].coordinates) == itemCount
varRegionIndices = [
regionOrder.index(frozenset(var.axes.items())) for var in variations
]
varDataItems = list(zip(*(var.coordinates for var in variations)))
varDatas.append(
builder.buildVarData(varRegionIndices, varDataItems, optimize=False)
)
else:
varDatas.append(
builder.buildVarData([], [[] for _ in range(itemCount)])
)
regionList = builder.buildVarRegionList(self.regions, self.axisOrder)
itemVarStore = builder.buildVarStore(regionList, varDatas)
# remove unused regions from VarRegionList
itemVarStore.prune_regions()
return itemVarStore
def instantiateItemVariationStore(itemVarStore, fvarAxes, axisLimits):
"""Compute deltas at partial location, and update varStore in-place.
Remove regions in which all axes were instanced, or fall outside the new axis
limits. Scale the deltas of the remaining regions where only some of the axes
were instanced.
The number of VarData subtables, and the number of items within each, are
not modified, in order to keep the existing VariationIndex valid.
One may call VarStore.optimize() method after this to further optimize those.
Args:
varStore: An otTables.VarStore object (Item Variation Store)
fvarAxes: list of fvar's Axis objects
axisLimits: NormalizedAxisLimits: mapping axis tags to normalized
min/default/max axis coordinates. May not specify coordinates/ranges for
all the fvar axes.
Returns:
defaultDeltas: to be added to the default instance, of type dict of floats
keyed by VariationIndex compound values: i.e. (outer << 16) + inner.
"""
tupleVarStore = _TupleVarStoreAdapter.fromItemVarStore(itemVarStore, fvarAxes)
defaultDeltaArray = tupleVarStore.instantiate(axisLimits)
newItemVarStore = tupleVarStore.asItemVarStore()
itemVarStore.VarRegionList = newItemVarStore.VarRegionList
assert itemVarStore.VarDataCount == newItemVarStore.VarDataCount
itemVarStore.VarData = newItemVarStore.VarData
defaultDeltas = {
((major << 16) + minor): delta
for major, deltas in enumerate(defaultDeltaArray)
for minor, delta in enumerate(deltas)
}
defaultDeltas[itemVarStore.NO_VARIATION_INDEX] = 0
return defaultDeltas
def instantiateOTL(varfont, axisLimits):
# TODO(anthrotype) Support partial instancing of JSTF and BASE tables
if (
"GDEF" not in varfont
or varfont["GDEF"].table.Version < 0x00010003
or not varfont["GDEF"].table.VarStore
):
return
if "GPOS" in varfont:
msg = "Instantiating GDEF and GPOS tables"
else:
msg = "Instantiating GDEF table"
log.info(msg)
gdef = varfont["GDEF"].table
varStore = gdef.VarStore
fvarAxes = varfont["fvar"].axes
defaultDeltas = instantiateItemVariationStore(varStore, fvarAxes, axisLimits)
# When VF are built, big lookups may overflow and be broken into multiple
# subtables. MutatorMerger (which inherits from AligningMerger) reattaches
# them upon instancing, in case they can now fit a single subtable (if not,
# they will be split again upon compilation).
# This 'merger' also works as a 'visitor' that traverses the OTL tables and
# calls specific methods when instances of a given type are found.
# Specifically, it adds default deltas to GPOS Anchors/ValueRecords and GDEF
# LigatureCarets, and optionally deletes all VariationIndex tables if the
# VarStore is fully instanced.
merger = MutatorMerger(
varfont, defaultDeltas, deleteVariations=(not varStore.VarRegionList.Region)
)
merger.mergeTables(varfont, [varfont], ["GDEF", "GPOS"])
if varStore.VarRegionList.Region:
varIndexMapping = varStore.optimize()
gdef.remap_device_varidxes(varIndexMapping)
if "GPOS" in varfont:
varfont["GPOS"].table.remap_device_varidxes(varIndexMapping)
else:
# Downgrade GDEF.
del gdef.VarStore
gdef.Version = 0x00010002
if gdef.MarkGlyphSetsDef is None:
del gdef.MarkGlyphSetsDef
gdef.Version = 0x00010000
if not (
gdef.LigCaretList
or gdef.MarkAttachClassDef
or gdef.GlyphClassDef
or gdef.AttachList
or (gdef.Version >= 0x00010002 and gdef.MarkGlyphSetsDef)
):
del varfont["GDEF"]
def _isValidAvarSegmentMap(axisTag, segmentMap):
if not segmentMap:
return True
if not {(-1.0, -1.0), (0, 0), (1.0, 1.0)}.issubset(segmentMap.items()):
log.warning(
f"Invalid avar SegmentMap record for axis '{axisTag}': does not "
"include all required value maps {-1.0: -1.0, 0: 0, 1.0: 1.0}"
)
return False
previousValue = None
for fromCoord, toCoord in sorted(segmentMap.items()):
if previousValue is not None and previousValue > toCoord:
log.warning(
f"Invalid avar AxisValueMap({fromCoord}, {toCoord}) record "
f"for axis '{axisTag}': the toCoordinate value must be >= to "
f"the toCoordinate value of the preceding record ({previousValue})."
)
return False
previousValue = toCoord
return True
def instantiateAvar(varfont, axisLimits):
# 'axisLimits' dict must contain user-space (non-normalized) coordinates.
segments = varfont["avar"].segments
# drop table if we instantiate all the axes
pinnedAxes = set(axisLimits.pinnedLocation())
if pinnedAxes.issuperset(segments):
log.info("Dropping avar table")
del varfont["avar"]
return
log.info("Instantiating avar table")
for axis in pinnedAxes:
if axis in segments:
del segments[axis]
# First compute the default normalization for axisLimits coordinates: i.e.
# min = -1.0, default = 0, max = +1.0, and in between values interpolated linearly,
# without using the avar table's mappings.
# Then, for each SegmentMap, if we are restricting its axis, compute the new
# mappings by dividing the key/value pairs by the desired new min/max values,
# dropping any mappings that fall outside the restricted range.
# The keys ('fromCoord') are specified in default normalized coordinate space,
# whereas the values ('toCoord') are "mapped forward" using the SegmentMap.
normalizedRanges = axisLimits.normalize(varfont, usingAvar=False)
newSegments = {}
for axisTag, mapping in segments.items():
if not _isValidAvarSegmentMap(axisTag, mapping):
continue
if mapping and axisTag in normalizedRanges:
axisRange = normalizedRanges[axisTag]
mappedMin = floatToFixedToFloat(
piecewiseLinearMap(axisRange.minimum, mapping), 14
)
mappedDef = floatToFixedToFloat(
piecewiseLinearMap(axisRange.default, mapping), 14
)
mappedMax = floatToFixedToFloat(
piecewiseLinearMap(axisRange.maximum, mapping), 14
)
newMapping = {}
for fromCoord, toCoord in mapping.items():
if fromCoord < axisRange.minimum or fromCoord > axisRange.maximum:
continue
fromCoord = normalizeValue(fromCoord, axisRange)
assert mappedMin <= toCoord <= mappedMax
toCoord = normalizeValue(toCoord, (mappedMin, mappedDef, mappedMax))
fromCoord = floatToFixedToFloat(fromCoord, 14)
toCoord = floatToFixedToFloat(toCoord, 14)
newMapping[fromCoord] = toCoord
newMapping.update({-1.0: -1.0, 0.0: 0.0, 1.0: 1.0})
newSegments[axisTag] = newMapping
else:
newSegments[axisTag] = mapping
varfont["avar"].segments = newSegments
def isInstanceWithinAxisRanges(location, axisRanges):
for axisTag, coord in location.items():
if axisTag in axisRanges:
axisRange = axisRanges[axisTag]
if coord < axisRange.minimum or coord > axisRange.maximum:
return False
return True
def instantiateFvar(varfont, axisLimits):
# 'axisLimits' dict must contain user-space (non-normalized) coordinates
location = axisLimits.pinnedLocation()
fvar = varfont["fvar"]
# drop table if we instantiate all the axes
if set(location).issuperset(axis.axisTag for axis in fvar.axes):
log.info("Dropping fvar table")
del varfont["fvar"]
return
log.info("Instantiating fvar table")
axes = []
for axis in fvar.axes:
axisTag = axis.axisTag
if axisTag in location:
continue
if axisTag in axisLimits:
triple = axisLimits[axisTag]
if triple.default is None:
triple = (triple.minimum, axis.defaultValue, triple.maximum)
axis.minValue, axis.defaultValue, axis.maxValue = triple
axes.append(axis)
fvar.axes = axes
# only keep NamedInstances whose coordinates == pinned axis location
instances = []
for instance in fvar.instances:
if any(instance.coordinates[axis] != value for axis, value in location.items()):
continue
for axisTag in location:
del instance.coordinates[axisTag]
if not isInstanceWithinAxisRanges(instance.coordinates, axisLimits):
continue
instances.append(instance)
fvar.instances = instances
def instantiateSTAT(varfont, axisLimits):
# 'axisLimits' dict must contain user-space (non-normalized) coordinates
stat = varfont["STAT"].table
if not stat.DesignAxisRecord or not (
stat.AxisValueArray and stat.AxisValueArray.AxisValue
):
return # STAT table empty, nothing to do
log.info("Instantiating STAT table")
newAxisValueTables = axisValuesFromAxisLimits(stat, axisLimits)
stat.AxisValueCount = len(newAxisValueTables)
if stat.AxisValueCount:
stat.AxisValueArray.AxisValue = newAxisValueTables
else:
stat.AxisValueArray = None
def axisValuesFromAxisLimits(stat, axisLimits):
def isAxisValueOutsideLimits(axisTag, axisValue):
if axisTag in axisLimits:
triple = axisLimits[axisTag]
if axisValue < triple.minimum or axisValue > triple.maximum:
return True
return False
# only keep AxisValues whose axis is not pinned nor restricted, or is pinned at the
# exact (nominal) value, or is restricted but the value is within the new range
designAxes = stat.DesignAxisRecord.Axis
newAxisValueTables = []
for axisValueTable in stat.AxisValueArray.AxisValue:
axisValueFormat = axisValueTable.Format
if axisValueFormat in (1, 2, 3):
axisTag = designAxes[axisValueTable.AxisIndex].AxisTag
if axisValueFormat == 2:
axisValue = axisValueTable.NominalValue
else:
axisValue = axisValueTable.Value
if isAxisValueOutsideLimits(axisTag, axisValue):
continue
elif axisValueFormat == 4:
# drop 'non-analytic' AxisValue if _any_ AxisValueRecord doesn't match
# the pinned location or is outside range
dropAxisValueTable = False
for rec in axisValueTable.AxisValueRecord:
axisTag = designAxes[rec.AxisIndex].AxisTag
axisValue = rec.Value
if isAxisValueOutsideLimits(axisTag, axisValue):
dropAxisValueTable = True
break
if dropAxisValueTable:
continue
else:
log.warning("Unknown AxisValue table format (%s); ignored", axisValueFormat)
newAxisValueTables.append(axisValueTable)
return newAxisValueTables
def setMacOverlapFlags(glyfTable):
flagOverlapCompound = _g_l_y_f.OVERLAP_COMPOUND
flagOverlapSimple = _g_l_y_f.flagOverlapSimple
for glyphName in glyfTable.keys():
glyph = glyfTable[glyphName]
# Set OVERLAP_COMPOUND bit for compound glyphs
if glyph.isComposite():
glyph.components[0].flags |= flagOverlapCompound
# Set OVERLAP_SIMPLE bit for simple glyphs
elif glyph.numberOfContours > 0:
glyph.flags[0] |= flagOverlapSimple
def normalize(value, triple, avarMapping):
value = normalizeValue(value, triple)
if avarMapping:
value = piecewiseLinearMap(value, avarMapping)
# Quantize to F2Dot14, to avoid surprise interpolations.
return floatToFixedToFloat(value, 14)
def sanityCheckVariableTables(varfont):
if "fvar" not in varfont:
raise ValueError("Missing required table fvar")
if "gvar" in varfont:
if "glyf" not in varfont:
raise ValueError("Can't have gvar without glyf")
# TODO(anthrotype) Remove once we do support partial instancing CFF2
if "CFF2" in varfont:
raise NotImplementedError("Instancing CFF2 variable fonts is not supported yet")
def instantiateVariableFont(
varfont,
axisLimits,
inplace=False,
optimize=True,
overlap=OverlapMode.KEEP_AND_SET_FLAGS,
updateFontNames=False,
):
"""Instantiate variable font, either fully or partially.
Depending on whether the `axisLimits` dictionary references all or some of the
input varfont's axes, the output font will either be a full instance (static
font) or a variable font with possibly less variation data.
Args:
varfont: a TTFont instance, which must contain at least an 'fvar' table.
Note that variable fonts with 'CFF2' table are not supported yet.
axisLimits: a dict keyed by axis tags (str) containing the coordinates (float)
along one or more axes where the desired instance will be located.
If the value is `None`, the default coordinate as per 'fvar' table for
that axis is used.
The limit values can also be (min, max) tuples for restricting an
axis's variation range. The default axis value must be included in
the new range.
inplace (bool): whether to modify input TTFont object in-place instead of
returning a distinct object.
optimize (bool): if False, do not perform IUP-delta optimization on the
remaining 'gvar' table's deltas. Possibly faster, and might work around
rendering issues in some buggy environments, at the cost of a slightly
larger file size.
overlap (OverlapMode): variable fonts usually contain overlapping contours, and
some font rendering engines on Apple platforms require that the
`OVERLAP_SIMPLE` and `OVERLAP_COMPOUND` flags in the 'glyf' table be set to
force rendering using a non-zero fill rule. Thus we always set these flags
on all glyphs to maximise cross-compatibility of the generated instance.
You can disable this by passing OverlapMode.KEEP_AND_DONT_SET_FLAGS.
If you want to remove the overlaps altogether and merge overlapping
contours and components, you can pass OverlapMode.REMOVE (or
REMOVE_AND_IGNORE_ERRORS to not hard-fail on tricky glyphs). Note that this
requires the skia-pathops package (available to pip install).
The overlap parameter only has effect when generating full static instances.
updateFontNames (bool): if True, update the instantiated font's name table using
the Axis Value Tables from the STAT table. The name table and the style bits
in the head and OS/2 table will be updated so they conform to the R/I/B/BI
model. If the STAT table is missing or an Axis Value table is missing for
a given axis coordinate, a ValueError will be raised.
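    A minimal usage sketch (the font path and axis values below are hypothetical
    placeholders)::

        | >>> from fontTools import ttLib
        | >>> from fontTools.varLib import instancer
        | >>> varfont = ttLib.TTFont("MyFont-VF.ttf")
        | >>> # restrict wght to 400..700, keeping the font variable
        | >>> partial = instancer.instantiateVariableFont(varfont, {"wght": (400, 700)})
        | >>> # pin every axis to produce a static instance (assuming wght and wdth only)
        | >>> static = instancer.instantiateVariableFont(varfont, {"wght": 400, "wdth": 100})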
"""
# 'overlap' used to be bool and is now enum; for backward compat keep accepting bool
overlap = OverlapMode(int(overlap))
sanityCheckVariableTables(varfont)
axisLimits = AxisLimits(axisLimits).populateDefaults(varfont)
normalizedLimits = axisLimits.normalize(varfont)
log.info("Normalized limits: %s", normalizedLimits)
if not inplace:
varfont = deepcopy(varfont)
if "DSIG" in varfont:
del varfont["DSIG"]
if updateFontNames:
log.info("Updating name table")
names.updateNameTable(varfont, axisLimits)
if "gvar" in varfont:
instantiateGvar(varfont, normalizedLimits, optimize=optimize)
if "cvar" in varfont:
instantiateCvar(varfont, normalizedLimits)
if "MVAR" in varfont:
instantiateMVAR(varfont, normalizedLimits)
if "HVAR" in varfont:
instantiateHVAR(varfont, normalizedLimits)
if "VVAR" in varfont:
instantiateVVAR(varfont, normalizedLimits)
instantiateOTL(varfont, normalizedLimits)
instantiateFeatureVariations(varfont, normalizedLimits)
if "avar" in varfont:
instantiateAvar(varfont, axisLimits)
with names.pruningUnusedNames(varfont):
if "STAT" in varfont:
instantiateSTAT(varfont, axisLimits)
instantiateFvar(varfont, axisLimits)
if "fvar" not in varfont:
if "glyf" in varfont:
if overlap == OverlapMode.KEEP_AND_SET_FLAGS:
setMacOverlapFlags(varfont["glyf"])
elif overlap in (OverlapMode.REMOVE, OverlapMode.REMOVE_AND_IGNORE_ERRORS):
from fontTools.ttLib.removeOverlaps import removeOverlaps
log.info("Removing overlaps from glyf table")
removeOverlaps(
varfont,
ignoreErrors=(overlap == OverlapMode.REMOVE_AND_IGNORE_ERRORS),
)
varLib.set_default_weight_width_slant(
varfont, location=axisLimits.defaultLocation()
)
if updateFontNames:
# Set Regular/Italic/Bold/Bold Italic bits as appropriate, after the
# name table has been updated.
setRibbiBits(varfont)
return varfont
def setRibbiBits(font):
"""Set the `head.macStyle` and `OS/2.fsSelection` style bits
appropriately."""
english_ribbi_style = font["name"].getName(names.NameID.SUBFAMILY_NAME, 3, 1, 0x409)
if english_ribbi_style is None:
return
styleMapStyleName = english_ribbi_style.toStr().lower()
if styleMapStyleName not in {"regular", "bold", "italic", "bold italic"}:
return
if styleMapStyleName == "bold":
font["head"].macStyle = 0b01
elif styleMapStyleName == "bold italic":
font["head"].macStyle = 0b11
elif styleMapStyleName == "italic":
font["head"].macStyle = 0b10
selection = font["OS/2"].fsSelection
# First clear...
selection &= ~(1 << 0)
selection &= ~(1 << 5)
selection &= ~(1 << 6)
# ...then re-set the bits.
if styleMapStyleName == "regular":
selection |= 1 << 6
elif styleMapStyleName == "bold":
selection |= 1 << 5
elif styleMapStyleName == "italic":
selection |= 1 << 0
elif styleMapStyleName == "bold italic":
selection |= 1 << 0
selection |= 1 << 5
font["OS/2"].fsSelection = selection
def parseLimits(limits: Iterable[str]) -> Dict[str, Optional[AxisTriple]]:
result = {}
for limitString in limits:
match = re.match(
r"^(\w{1,4})=(?:(drop)|(?:([^:]+)(?:[:]([^:]+))?(?:[:]([^:]+))?))$",
limitString,
)
if not match:
raise ValueError("invalid location format: %r" % limitString)
tag = match.group(1).ljust(4)
if match.group(2): # 'drop'
lbound = None
else:
lbound = strToFixedToFloat(match.group(3), precisionBits=16)
ubound = default = lbound
if match.group(4):
ubound = default = strToFixedToFloat(match.group(4), precisionBits=16)
default = None
if match.group(5):
default = ubound
ubound = strToFixedToFloat(match.group(5), precisionBits=16)
if all(v is None for v in (lbound, default, ubound)):
result[tag] = None
continue
result[tag] = AxisTriple(lbound, default, ubound)
return result
def parseArgs(args):
"""Parse argv.
Returns:
3-tuple (infile, axisLimits, options)
axisLimits is either a Dict[str, Optional[float]], for pinning variation axes
to specific coordinates along those axes (with `None` as a placeholder for an
axis' default value); or a Dict[str, Tuple(float, float)], meaning limit this
axis to min/max range.
Axes locations are in user-space coordinates, as defined in the "fvar" table.
"""
from fontTools import configLogger
import argparse
parser = argparse.ArgumentParser(
"fonttools varLib.instancer",
description="Partially instantiate a variable font",
)
parser.add_argument("input", metavar="INPUT.ttf", help="Input variable TTF file.")
parser.add_argument(
"locargs",
metavar="AXIS=LOC",
nargs="*",
help="List of space separated locations. A location consists of "
"the tag of a variation axis, followed by '=' and one of number, "
"number:number or the literal string 'drop'. "
"E.g.: wdth=100 or wght=75.0:125.0 or wght=drop",
)
parser.add_argument(
"-o",
"--output",
metavar="OUTPUT.ttf",
default=None,
help="Output instance TTF file (default: INPUT-instance.ttf).",
)
parser.add_argument(
"--no-optimize",
dest="optimize",
action="store_false",
help="Don't perform IUP optimization on the remaining gvar TupleVariations",
)
parser.add_argument(
"--no-overlap-flag",
dest="overlap",
action="store_false",
help="Don't set OVERLAP_SIMPLE/OVERLAP_COMPOUND glyf flags (only applicable "
"when generating a full instance)",
)
parser.add_argument(
"--remove-overlaps",
dest="remove_overlaps",
action="store_true",
help="Merge overlapping contours and components (only applicable "
"when generating a full instance). Requires skia-pathops",
)
parser.add_argument(
"--ignore-overlap-errors",
dest="ignore_overlap_errors",
action="store_true",
help="Don't crash if the remove-overlaps operation fails for some glyphs.",
)
parser.add_argument(
"--update-name-table",
action="store_true",
help="Update the instantiated font's `name` table. Input font must have "
"a STAT table with Axis Value Tables",
)
parser.add_argument(
"--no-recalc-timestamp",
dest="recalc_timestamp",
action="store_false",
help="Don't set the output font's timestamp to the current time.",
)
parser.add_argument(
"--no-recalc-bounds",
dest="recalc_bounds",
action="store_false",
help="Don't recalculate font bounding boxes",
)
loggingGroup = parser.add_mutually_exclusive_group(required=False)
loggingGroup.add_argument(
"-v", "--verbose", action="store_true", help="Run more verbosely."
)
loggingGroup.add_argument(
"-q", "--quiet", action="store_true", help="Turn verbosity off."
)
options = parser.parse_args(args)
if options.remove_overlaps:
if options.ignore_overlap_errors:
options.overlap = OverlapMode.REMOVE_AND_IGNORE_ERRORS
else:
options.overlap = OverlapMode.REMOVE
else:
options.overlap = OverlapMode(int(options.overlap))
infile = options.input
if not os.path.isfile(infile):
parser.error("No such file '{}'".format(infile))
configLogger(
level=("DEBUG" if options.verbose else "ERROR" if options.quiet else "INFO")
)
try:
axisLimits = parseLimits(options.locargs)
except ValueError as e:
parser.error(str(e))
if len(axisLimits) != len(options.locargs):
parser.error("Specified multiple limits for the same axis")
return (infile, axisLimits, options)
def main(args=None):
"""Partially instantiate a variable font"""
infile, axisLimits, options = parseArgs(args)
log.info("Restricting axes: %s", axisLimits)
log.info("Loading variable font")
varfont = TTFont(
infile,
recalcTimestamp=options.recalc_timestamp,
recalcBBoxes=options.recalc_bounds,
)
isFullInstance = {
axisTag for axisTag, limit in axisLimits.items() if not isinstance(limit, tuple)
}.issuperset(axis.axisTag for axis in varfont["fvar"].axes)
instantiateVariableFont(
varfont,
axisLimits,
inplace=True,
optimize=options.optimize,
overlap=options.overlap,
updateFontNames=options.update_name_table,
)
suffix = "-instance" if isFullInstance else "-partial"
outfile = (
makeOutputFileName(infile, overWrite=True, suffix=suffix)
if not options.output
else options.output
)
log.info(
"Saving %s font %s",
"instance" if isFullInstance else "partial variable",
outfile,
)
varfont.save(outfile)
| {
"content_hash": "b4c6549ad57751e29d0e8ff2fd8ae102",
"timestamp": "",
"source": "github",
"line_count": 1406,
"max_line_length": 96,
"avg_line_length": 37.519203413940254,
"alnum_prop": 0.6523354564755839,
"repo_name": "fonttools/fonttools",
"id": "0b0c8349d15008f8ae313e4b9809d2abb33097e2",
"size": "52752",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "Lib/fontTools/varLib/instancer/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "3522"
},
{
"name": "Makefile",
"bytes": "352"
},
{
"name": "Python",
"bytes": "5442538"
}
],
"symlink_target": ""
} |
"""
Sample Rule
===========
This is a simple rule and can be run against the local host
using the following command::
$ insights-run -p examples.rules.sample_script
or from the examples/rules directory::
    $ ./sample_script.py
"""
from insights.core.plugins import make_fail, make_pass, rule
from insights.parsers.redhat_release import RedhatRelease
# Jinja template for the message to be displayed for either
# response tag
CONTENT = "This machine runs {{product}}."
@rule(RedhatRelease, content=CONTENT)
def report(rel):
"""Fires if the machine is running Fedora."""
if "Fedora" in rel.product:
return make_pass("IS_FEDORA", product=rel.product)
else:
return make_fail("IS_NOT_FEDORA", product=rel.product)
if __name__ == "__main__":
from insights import run
run(report, print_summary=True)
| {
"content_hash": "9c9e62ab41c9c505242f2aa38fbd6d3b",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 62,
"avg_line_length": 24.705882352941178,
"alnum_prop": 0.694047619047619,
"repo_name": "RedHatInsights/insights-core",
"id": "7b574b8531a359fcf1ff3f2872f7c12b6c4c8a1c",
"size": "862",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/rules/sample_script.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "220"
},
{
"name": "Python",
"bytes": "8219046"
},
{
"name": "Shell",
"bytes": "1754"
}
],
"symlink_target": ""
} |
from threading import current_thread
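# Map of worker thread -> current request, populated by RequestMiddleware so that
# code without direct access to the request object (e.g. model methods) can still
# look up the requesting user via current_user().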
_requests = {}
def current_user():
t = current_thread()
if t not in _requests:
return None
return _requests[t].user
class RequestMiddleware(object):
def process_request(self, request):
_requests[current_thread()] = request
| {
"content_hash": "127d0dd5f99784110097a3cf171e08cd",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 45,
"avg_line_length": 22.692307692307693,
"alnum_prop": 0.6677966101694915,
"repo_name": "cscanlin/munger-builder",
"id": "2932e7d525a5949548ea0b2f4e1cbe6c0f2a97d4",
"size": "295",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "script_builder/current_user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "51193"
},
{
"name": "HTML",
"bytes": "28701"
},
{
"name": "JavaScript",
"bytes": "129371"
},
{
"name": "Python",
"bytes": "52431"
},
{
"name": "Shell",
"bytes": "615"
}
],
"symlink_target": ""
} |
"""Classes to handle Unix style, MMDF style, and MH style mailboxes."""
import rfc822
import os,string
import re
__all__ = ["UnixMailbox","MmdfMailbox","MHMailbox","Maildir","BabylMailbox"]
class _Mailbox:
def __init__(self, fp, factory=rfc822.Message):
self.fp = fp
self.seekp = 0
self.factory = factory
def __iter__(self):
return iter(self.next, None)
def next(self):
while 1:
self.fp.seek(self.seekp)
try:
self._search_start()
except EOFError:
self.seekp = self.fp.tell()
return None
start = self.fp.tell()
self._search_end()
self.seekp = stop = self.fp.tell()
if start != stop:
break
return self.factory(_Subfile(self.fp, start, stop))
class _Subfile:
def __init__(self, fp, start, stop):
self.fp = fp
self.start = start
self.stop = stop
self.pos = self.start
def read(self, length = None):
if self.pos >= self.stop:
return ''
remaining = self.stop - self.pos
if length is None or length < 0:
length = remaining
elif length > remaining:
length = remaining
self.fp.seek(self.pos)
data = self.fp.read(length)
self.pos = self.fp.tell()
return data
def readline(self, length = None):
if self.pos >= self.stop:
return ''
if length is None:
length = self.stop - self.pos
self.fp.seek(self.pos)
data = self.fp.readline(length)
self.pos = self.fp.tell()
return data
def readlines(self, sizehint = -1):
lines = []
while 1:
line = self.readline()
if not line:
break
lines.append(line)
if sizehint >= 0:
sizehint = sizehint - len(line)
if sizehint <= 0:
break
return lines
def tell(self):
return self.pos - self.start
def seek(self, pos, whence=0):
if whence == 0:
self.pos = self.start + pos
elif whence == 1:
self.pos = self.pos + pos
elif whence == 2:
self.pos = self.stop + pos
def close(self):
del self.fp
class UnixMailbox(_Mailbox):
def _search_start(self):
while 1:
pos = self.fp.tell()
line = self.fp.readline()
if not line:
raise EOFError
if line[:5] == 'From ' and self._isrealfromline(line):
self.fp.seek(pos)
return
def _search_end(self):
self.fp.readline() # Throw away header line
while 1:
pos = self.fp.tell()
line = self.fp.readline()
if not line:
return
if line[:5] == 'From ' and self._isrealfromline(line):
self.fp.seek(pos)
return
# An overridable mechanism to test for From-line-ness. You can either
# specify a different regular expression or define a whole new
# _isrealfromline() method. Note that this only gets called for lines
# starting with the 5 characters "From ".
#
# BAW: According to
#http://home.netscape.com/eng/mozilla/2.0/relnotes/demo/content-length.html
# the only portable, reliable way to find message delimiters in a BSD (i.e
# Unix mailbox) style folder is to search for "\n\nFrom .*\n", or at the
# beginning of the file, "^From .*\n". While _fromlinepattern below seems
# like a good idea, in practice, there are too many variations for more
# strict parsing of the line to be completely accurate.
#
# _strict_isrealfromline() is the old version which tries to do stricter
# parsing of the From_ line. _portable_isrealfromline() simply returns
# true, since it's never called if the line doesn't already start with
# "From ".
#
# This algorithm, and the way it interacts with _search_start() and
# _search_end() may not be completely correct, because it doesn't check
# that the two characters preceding "From " are \n\n or the beginning of
# the file. Fixing this would require a more extensive rewrite than is
# necessary. For convenience, we've added a StrictUnixMailbox class which
# uses the older, more strict _fromlinepattern regular expression.
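    # For example, a typical From_ line accepted by the strict pattern looks like:
    #   From jdoe@example.com Sat Jan  5 09:04:27 2002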
_fromlinepattern = r"From \s*[^\s]+\s+\w\w\w\s+\w\w\w\s+\d?\d\s+" \
r"\d?\d:\d\d(:\d\d)?(\s+[^\s]+)?\s+\d\d\d\d\s*$"
_regexp = None
def _strict_isrealfromline(self, line):
if not self._regexp:
import re
self._regexp = re.compile(self._fromlinepattern)
return self._regexp.match(line)
def _portable_isrealfromline(self, line):
return 1
_isrealfromline = _strict_isrealfromline
class PortableUnixMailbox(UnixMailbox):
_isrealfromline = UnixMailbox._portable_isrealfromline
class UberUnixMailbox(PortableUnixMailbox):
_contentlengthpattern = "^Content-Length: ([0-9]+)"
def _search_end(self):
self.fp.readline() # Throw away header line
while 1:
pos = self.fp.tell()
line = self.fp.readline()
if not line:
return
if string.strip(line) == "": # we made it to the body...
return UnixMailbox._search_end(self)
m = re.match(self._contentlengthpattern,line)
if m:
clen = int(m.group(1))
while 1:
pos = self.fp.tell()
line = self.fp.readline()
if not line: return
if string.strip(line) == "": # we made it to the body...
content = self.fp.read(clen)
                        # should check for existence of a new "From" marker
return
class MmdfMailbox(_Mailbox):
def _search_start(self):
while 1:
line = self.fp.readline()
if not line:
raise EOFError
if line[:5] == '\001\001\001\001\n':
return
def _search_end(self):
while 1:
pos = self.fp.tell()
line = self.fp.readline()
if not line:
return
if line == '\001\001\001\001\n':
self.fp.seek(pos)
return
class MHMailbox:
def __init__(self, dirname, factory=rfc822.Message):
import re
pat = re.compile('^[1-9][0-9]*$')
self.dirname = dirname
# the three following lines could be combined into:
# list = map(long, filter(pat.match, os.listdir(self.dirname)))
list = os.listdir(self.dirname)
list = filter(pat.match, list)
list = map(long, list)
list.sort()
# This only works in Python 1.6 or later;
# before that str() added 'L':
self.boxes = map(str, list)
self.factory = factory
def __iter__(self):
return iter(self.next, None)
def next(self):
if not self.boxes:
return None
fn = self.boxes[0]
del self.boxes[0]
fp = open(os.path.join(self.dirname, fn))
return self.factory(fp)
class Maildir:
# Qmail directory mailbox
def __init__(self, dirname, factory=rfc822.Message):
self.dirname = dirname
self.factory = factory
# check for new mail
newdir = os.path.join(self.dirname, 'new')
boxes = [os.path.join(newdir, f)
for f in os.listdir(newdir) if f[0] != '.']
# Now check for current mail in this maildir
curdir = os.path.join(self.dirname, 'cur')
boxes += [os.path.join(curdir, f)
for f in os.listdir(curdir) if f[0] != '.']
self.boxes = boxes
def __iter__(self):
return iter(self.next, None)
def next(self):
if not self.boxes:
return None
fn = self.boxes[0]
del self.boxes[0]
fp = open(fn)
return self.factory(fp)
class BabylMailbox(_Mailbox):
def _search_start(self):
while 1:
line = self.fp.readline()
if not line:
raise EOFError
if line == '*** EOOH ***\n':
return
def _search_end(self):
while 1:
pos = self.fp.tell()
line = self.fp.readline()
if not line:
return
if line == '\037\014\n':
self.fp.seek(pos)
return
def _test():
import sys
args = sys.argv[1:]
if not args:
for key in 'MAILDIR', 'MAIL', 'LOGNAME', 'USER':
if os.environ.has_key(key):
mbox = os.environ[key]
break
else:
print "$MAIL, $LOGNAME nor $USER set -- who are you?"
return
else:
mbox = args[0]
if mbox[:1] == '+':
mbox = os.environ['HOME'] + '/Mail/' + mbox[1:]
elif not '/' in mbox:
mbox = '/usr/mail/' + mbox
if os.path.isdir(mbox):
if os.path.isdir(os.path.join(mbox, 'cur')):
mb = Maildir(mbox)
else:
mb = MHMailbox(mbox)
else:
fp = open(mbox, 'r')
mb = UnixMailbox(fp)
msgs = []
while 1:
msg = mb.next()
if msg is None:
break
msgs.append(msg)
if len(args) <= 1:
msg.fp = None
if len(args) > 1:
num = int(args[1])
print 'Message %d body:'%num
msg = msgs[num-1]
msg.rewindbody()
sys.stdout.write(msg.fp.read())
else:
print 'Mailbox',mbox,'has',len(msgs),'messages:'
for msg in msgs:
f = msg.getheader('from') or ""
s = msg.getheader('subject') or ""
d = msg.getheader('date') or ""
print '-%20.20s %20.20s %-30.30s'%(f, d[5:], s)
if __name__ == '__main__':
_test()
| {
"content_hash": "00f2ea5648d102d0b3d9861c89c00553",
"timestamp": "",
"source": "github",
"line_count": 339,
"max_line_length": 79,
"avg_line_length": 29.926253687315633,
"alnum_prop": 0.5212419911286348,
"repo_name": "jeske/csla",
"id": "0d7f432ad61b3c06335f5b4d76df4d36554e1a11",
"size": "10169",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pysrc/mymailbox.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "C#",
"bytes": "172953"
},
{
"name": "Python",
"bytes": "456465"
},
{
"name": "Shell",
"bytes": "85"
}
],
"symlink_target": ""
} |
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
import ddt
import mock
from oslo_config import cfg
import six
import time
from eventlet import greenthread
from manila import exception
from manila.share.drivers.qnap import api
from manila.share.drivers.qnap import qnap
from manila.share import share_types
from manila import test
from manila.tests import fake_share
from manila.tests.share.drivers.qnap import fakes
CONF = cfg.CONF
def create_configuration(management_url, qnap_share_ip, qnap_nas_login,
                         qnap_nas_password, qnap_poolname):
    """Create a mock configuration object for the QNAP share driver."""
configuration = mock.Mock()
configuration.qnap_management_url = management_url
configuration.qnap_share_ip = qnap_share_ip
configuration.qnap_nas_login = qnap_nas_login
configuration.qnap_nas_password = qnap_nas_password
configuration.qnap_poolname = qnap_poolname
configuration.safe_get.return_value = False
return configuration
class QnapShareDriverBaseTestCase(test.TestCase):
"""Base Class for the QnapShareDriver Tests."""
def setUp(self):
"""Setup the Qnap Driver Base TestCase."""
super(QnapShareDriverBaseTestCase, self).setUp()
self.driver = None
self.share_api = None
def _do_setup(self, management_url, share_ip, nas_login,
                  nas_password, poolname, **kwargs):
        """Configure the driver and call do_setup."""
self.driver = qnap.QnapShareDriver(
configuration=create_configuration(
management_url,
share_ip,
nas_login,
nas_password,
poolname),
private_storage=kwargs.get('private_storage'))
self.driver.do_setup('context')
@ddt.ddt
class QnapShareDriverLoginTestCase(QnapShareDriverBaseTestCase):
"""Tests do_setup api."""
def setUp(self):
"""Setup the Qnap Share Driver login TestCase."""
super(QnapShareDriverLoginTestCase, self).setUp()
self.mock_object(six.moves.http_client, 'HTTPConnection')
self.mock_object(six.moves.http_client, 'HTTPSConnection')
@ddt.unpack
@ddt.data({'mng_url': 'http://1.2.3.4:8080', 'port': '8080', 'ssl': False},
{'mng_url': 'https://1.2.3.4:443', 'port': '443', 'ssl': True})
    def test_do_setup_positive(self, mng_url, port, ssl):
        """Test do_setup with HTTP and HTTPS management URLs."""
fake_login_response = fakes.FakeLoginResponse()
fake_get_basic_info_response_es = (
fakes.FakeGetBasicInfoResponseEs_1_1_3())
if ssl:
mock_connection = six.moves.http_client.HTTPSConnection
else:
mock_connection = six.moves.http_client.HTTPConnection
mock_connection.return_value.getresponse.side_effect = [
fake_login_response,
fake_get_basic_info_response_es,
fake_login_response]
self._do_setup(mng_url, '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1')
self.assertEqual(
mng_url,
self.driver.configuration.qnap_management_url)
self.assertEqual(
'1.2.3.4', self.driver.configuration.qnap_share_ip)
self.assertEqual(
'admin', self.driver.configuration.qnap_nas_login)
self.assertEqual(
'qnapadmin', self.driver.configuration.qnap_nas_password)
self.assertEqual(
'Storage Pool 1', self.driver.configuration.qnap_poolname)
self.assertEqual('fakeSid', self.driver.api_executor.sid)
self.assertEqual('admin', self.driver.api_executor.username)
self.assertEqual('qnapadmin', self.driver.api_executor.password)
self.assertEqual('1.2.3.4', self.driver.api_executor.ip)
self.assertEqual(port, self.driver.api_executor.port)
self.assertEqual(ssl, self.driver.api_executor.ssl)
@ddt.data(fakes.FakeGetBasicInfoResponseTs_4_3_0(),
fakes.FakeGetBasicInfoResponseTesTs_4_3_0(),
fakes.FakeGetBasicInfoResponseTesEs_1_1_3())
def test_do_setup_positive_with_diff_nas(self, fake_basic_info):
"""Test do_setup with different NAS model."""
fake_login_response = fakes.FakeLoginResponse()
mock_connection = six.moves.http_client.HTTPSConnection
mock_connection.return_value.getresponse.side_effect = [
fake_login_response,
fake_basic_info,
fake_login_response]
self._do_setup('https://1.2.3.4:443', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1')
self.assertEqual('fakeSid', self.driver.api_executor.sid)
self.assertEqual('admin', self.driver.api_executor.username)
self.assertEqual('qnapadmin', self.driver.api_executor.password)
self.assertEqual('1.2.3.4', self.driver.api_executor.ip)
self.assertEqual('443', self.driver.api_executor.port)
self.assertTrue(self.driver.api_executor.ssl)
@ddt.data({
'fake_basic_info': fakes.FakeGetBasicInfoResponseTs_4_3_0(),
'expect_result': api.QnapAPIExecutorTS
}, {
'fake_basic_info': fakes.FakeGetBasicInfoResponseTesTs_4_3_0(),
'expect_result': api.QnapAPIExecutorTS
}, {
'fake_basic_info': fakes.FakeGetBasicInfoResponseTesEs_1_1_3(),
'expect_result': api.QnapAPIExecutor
}, {
'fake_basic_info': fakes.FakeGetBasicInfoResponseEs_1_1_3(),
'expect_result': api.QnapAPIExecutor
})
@ddt.unpack
    def test_create_api_executor(self, fake_basic_info, expect_result):
        """Test that the correct API executor is created for each NAS model."""
fake_login_response = fakes.FakeLoginResponse()
mock_connection = six.moves.http_client.HTTPSConnection
mock_connection.return_value.getresponse.side_effect = [
fake_login_response,
fake_basic_info,
fake_login_response]
self._do_setup('https://1.2.3.4:443', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1')
self.assertIsInstance(self.driver.api_executor, expect_result)
@ddt.data({
'fake_basic_info': fakes.FakeGetBasicInfoResponseTs_4_0_0(),
'expect_result': exception.ShareBackendException
}, {
'fake_basic_info': fakes.FakeGetBasicInfoResponseTesTs_4_0_0(),
'expect_result': exception.ShareBackendException
}, {
'fake_basic_info': fakes.FakeGetBasicInfoResponseTesEs_1_1_1(),
'expect_result': exception.ShareBackendException
}, {
'fake_basic_info': fakes.FakeGetBasicInfoResponseEs_1_1_1(),
'expect_result': exception.ShareBackendException
})
@ddt.unpack
def test_create_api_executor_negative(self,
                                          fake_basic_info, expect_result):
        """Test that unsupported NAS firmware versions raise an exception."""
fake_login_response = fakes.FakeLoginResponse()
mock_connection = six.moves.http_client.HTTPSConnection
mock_connection.return_value.getresponse.side_effect = [
fake_login_response,
fake_basic_info,
fake_login_response]
self.assertRaises(
exception.ShareBackendException,
self._do_setup,
'https://1.2.3.4:443',
'1.2.3.4',
'admin',
'qnapadmin',
'Storage Pool 1')
def test_do_setup_with_exception(self):
"""Test do_setup with exception."""
fake_login_response = fakes.FakeLoginResponse()
fake_get_basic_info_response_error = (
fakes.FakeGetBasicInfoResponseError())
mock_connection = six.moves.http_client.HTTPSConnection
mock_connection.return_value.getresponse.side_effect = [
fake_login_response,
fake_get_basic_info_response_error,
fake_login_response]
self.driver = qnap.QnapShareDriver(
configuration=create_configuration(
'https://1.2.3.4:443', '1.2.3.4', 'admin',
'qnapadmin', 'Pool1'))
self.assertRaises(
exception.ShareBackendException,
self.driver.do_setup,
context='context')
    def test_check_for_setup_error(self):
        """Test check_for_setup_error before do_setup has been run."""
self.driver = qnap.QnapShareDriver(
configuration=create_configuration(
'https://1.2.3.4:443', '1.2.3.4', 'admin',
'qnapadmin', 'Pool1'))
self.assertRaises(
exception.ShareBackendException,
self.driver.check_for_setup_error)
@ddt.ddt
class QnapShareDriverTestCase(QnapShareDriverBaseTestCase):
"""Tests share driver functions."""
def setUp(self):
"""Setup the Qnap Driver Base TestCase."""
super(QnapShareDriverTestCase, self).setUp()
self.mock_object(qnap.QnapShareDriver, '_create_api_executor')
self.share = fake_share.fake_share(
share_proto='NFS',
id='shareId',
display_name='fakeDisplayName',
export_locations=[{'path': '1.2.3.4:/share/fakeShareName'}],
host='QnapShareDriver',
size=10)
    def get_share_info_return_value(self):
        """Return the share info from the get_share_info method."""
root = ET.fromstring(fakes.FAKE_RES_DETAIL_DATA_SHARE_INFO)
share_list = root.find('Volume_Info')
share_info_tree = share_list.findall('row')
for share in share_info_tree:
return share
    def get_snapshot_info_return_value(self):
        """Return the snapshot info from the get_snapshot_info method."""
root = ET.fromstring(fakes.FAKE_RES_DETAIL_DATA_SNAPSHOT)
snapshot_list = root.find('SnapshotList')
snapshot_info_tree = snapshot_list.findall('row')
for snapshot in snapshot_info_tree:
return snapshot
    def get_specific_volinfo_return_value(self):
        """Return the volume info from the get_specific_volinfo method."""
root = ET.fromstring(fakes.FAKE_RES_DETAIL_DATA_VOLUME_INFO)
volume_list = root.find('Volume_Info')
volume_info_tree = volume_list.findall('row')
for volume in volume_info_tree:
return volume
def get_specific_poolinfo_return_value(self):
"""Get specific pool info."""
root = ET.fromstring(fakes.FAKE_RES_DETAIL_DATA_SPECIFIC_POOL_INFO)
pool_list = root.find('Pool_Index')
pool_info_tree = pool_list.findall('row')
for pool in pool_info_tree:
return pool
def get_host_list_return_value(self):
"""Get host list."""
root = ET.fromstring(fakes.FAKE_RES_DETAIL_DATA_GET_HOST_LIST)
hosts = []
host_list = root.find('host_list')
host_tree = host_list.findall('host')
for host in host_tree:
hosts.append(host)
return hosts
@ddt.data({
'fake_extra_spec': {},
'expect_extra_spec': {
'qnap_thin_provision': True,
'qnap_compression': True,
'qnap_deduplication': False,
'qnap_ssd_cache': False
}
}, {
'fake_extra_spec': {
'thin_provisioning': u'true',
'compression': u'true',
'qnap_ssd_cache': u'true'
},
'expect_extra_spec': {
'qnap_thin_provision': True,
'qnap_compression': True,
'qnap_deduplication': False,
'qnap_ssd_cache': True
}
}, {
'fake_extra_spec': {
'thin_provisioning': u'<is> False',
'compression': u'<is> True',
'qnap_ssd_cache': u'<is> True'
},
'expect_extra_spec': {
'qnap_thin_provision': False,
'qnap_compression': True,
'qnap_deduplication': False,
'qnap_ssd_cache': True
}
}, {
'fake_extra_spec': {
'thin_provisioning': u'true',
'dedupe': u'<is> True',
'qnap_ssd_cache': u'False'
},
'expect_extra_spec': {
'qnap_thin_provision': True,
'qnap_compression': True,
'qnap_deduplication': True,
'qnap_ssd_cache': False
}
}, {
'fake_extra_spec': {
'thin_provisioning': u'<is> False',
'compression': u'false',
'dedupe': u'<is> False',
'qnap_ssd_cache': u'<is> False'
},
'expect_extra_spec': {
'qnap_thin_provision': False,
'qnap_compression': False,
'qnap_deduplication': False,
'qnap_ssd_cache': False
}
})
@ddt.unpack
@mock.patch.object(qnap.QnapShareDriver, '_get_location_path')
@mock.patch.object(qnap.QnapShareDriver, '_gen_random_name')
def test_create_share_positive(
self,
mock_gen_random_name,
mock_get_location_path,
fake_extra_spec, expect_extra_spec):
"""Test create share."""
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.side_effect = [
None, self.get_share_info_return_value()]
mock_gen_random_name.return_value = 'fakeShareName'
mock_api_executor.return_value.create_share.return_value = (
'fakeCreateShareId')
mock_get_location_path.return_value = None
mock_private_storage = mock.Mock()
self.mock_object(greenthread, 'sleep')
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.mock_object(share_types, 'get_extra_specs_from_share',
mock.Mock(return_value=fake_extra_spec))
self.driver.create_share('context', self.share)
mock_api_return = mock_api_executor.return_value
expected_call_list = [
mock.call('Storage Pool 1', vol_label='fakeShareName'),
mock.call('Storage Pool 1', vol_label='fakeShareName')]
self.assertEqual(
expected_call_list,
mock_api_return.get_share_info.call_args_list)
mock_api_executor.return_value.create_share.assert_called_once_with(
self.share,
self.driver.configuration.qnap_poolname,
'fakeShareName',
'NFS',
**expect_extra_spec)
mock_get_location_path.assert_called_once_with(
'fakeShareName', 'NFS', '1.2.3.4', 'fakeNo')
@mock.patch.object(qnap.QnapShareDriver, '_get_location_path')
@mock.patch.object(qnap.QnapShareDriver, '_gen_random_name')
def test_create_share_negative_share_exist(
self,
mock_gen_random_name,
            mock_get_location_path):
        """Test create share when the share name already exists."""
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.return_value = (
self.get_share_info_return_value())
mock_gen_random_name.return_value = 'fakeShareName'
mock_get_location_path.return_value = None
mock_private_storage = mock.Mock()
self.mock_object(time, 'sleep')
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.mock_object(share_types, 'get_extra_specs_from_share',
mock.Mock(return_value={}))
self.assertRaises(
exception.ShareBackendException,
self.driver.create_share,
context='context',
share=self.share)
@mock.patch.object(qnap.QnapShareDriver, '_get_location_path')
@mock.patch.object(qnap.QnapShareDriver, '_gen_random_name')
def test_create_share_negative_create_fail(
self,
mock_gen_random_name,
            mock_get_location_path):
        """Test create share when the new share does not appear on the backend."""
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.return_value = None
mock_gen_random_name.return_value = 'fakeShareName'
mock_get_location_path.return_value = None
mock_private_storage = mock.Mock()
self.mock_object(time, 'sleep')
self.mock_object(greenthread, 'sleep')
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.mock_object(share_types, 'get_extra_specs_from_share',
mock.Mock(return_value={}))
self.assertRaises(
exception.ShareBackendException,
self.driver.create_share,
context='context',
share=self.share)
@mock.patch.object(qnap.QnapShareDriver, '_get_location_path')
@mock.patch.object(qnap.QnapShareDriver, '_gen_random_name')
    def test_create_share_negative_configuration(
self,
mock_gen_random_name,
            mock_get_location_path):
        """Test create share with conflicting extra specs."""
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.side_effect = [
None, self.get_share_info_return_value()]
mock_gen_random_name.return_value = 'fakeShareName'
mock_get_location_path.return_value = None
mock_private_storage = mock.Mock()
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.mock_object(share_types, 'get_extra_specs_from_share',
mock.Mock(return_value={
'dedupe': 'true',
'thin_provisioning': 'false'}))
self.assertRaises(
exception.InvalidExtraSpec,
self.driver.create_share,
context='context',
share=self.share)
def test_delete_share_positive(self):
"""Test delete share with fake_share."""
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.return_value = (
self.get_share_info_return_value())
mock_api_executor.return_value.delete_share.return_value = (
'fakeCreateShareId')
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeVolNo'
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver.delete_share('context', self.share, share_server=None)
mock_api_executor.return_value.get_share_info.assert_called_once_with(
'Storage Pool 1', vol_no='fakeVolNo')
mock_api_executor.return_value.delete_share.assert_called_once_with(
'fakeNo')
def test_delete_share_no_volid(self):
"""Test delete share with fake_share and no volID."""
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver.delete_share('context', self.share, share_server=None)
mock_private_storage.get.assert_called_once_with(
'shareId', 'volID')
    def test_delete_share_no_delete_share(self):
        """Test delete share when the share is not found on the backend."""
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.return_value = None
mock_api_executor.return_value.delete_share.return_value = (
'fakeCreateShareId')
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeVolNo'
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver.delete_share('context', self.share, share_server=None)
mock_api_executor.return_value.get_share_info.assert_called_once_with(
'Storage Pool 1', vol_no='fakeVolNo')
def test_extend_share(self):
"""Test extend share with fake_share."""
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.return_value = (
self.get_share_info_return_value())
mock_api_executor.return_value.edit_share.return_value = None
mock_private_storage = mock.Mock()
mock_private_storage.get.side_effect = [
'fakeVolName',
'True',
'True',
'False',
'False']
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver.extend_share(self.share, 100, share_server=None)
expect_share_dict = {
'sharename': 'fakeVolName',
'old_sharename': 'fakeVolName',
'new_size': 100,
'thin_provision': True,
'compression': True,
'deduplication': False,
'ssd_cache': False,
'share_proto': 'NFS'
}
mock_api_executor.return_value.edit_share.assert_called_once_with(
expect_share_dict)
def test_extend_share_without_share_name(self):
"""Test extend share without share name."""
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.assertRaises(
exception.ShareResourceNotFound,
self.driver.extend_share,
share=self.share,
new_size=100,
share_server=None)
@mock.patch.object(qnap.QnapShareDriver, '_gen_random_name')
def test_create_snapshot(
self,
mock_gen_random_name):
"""Test create snapshot with fake_snapshot."""
fake_snapshot = fakes.SnapshotClass(
10, 'fakeShareName@fakeSnapshotName')
mock_gen_random_name.return_value = 'fakeSnapshotName'
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_snapshot_info.side_effect = [
None, self.get_snapshot_info_return_value()]
mock_api_executor.return_value.create_snapshot_api.return_value = (
'fakeCreateShareId')
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeVolId'
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver.create_snapshot(
'context', fake_snapshot, share_server=None)
mock_api_return = mock_api_executor.return_value
expected_call_list = [
mock.call(volID='fakeVolId', snapshot_name='fakeSnapshotName'),
mock.call(volID='fakeVolId', snapshot_name='fakeSnapshotName')]
self.assertEqual(
expected_call_list,
mock_api_return.get_snapshot_info.call_args_list)
mock_api_return.create_snapshot_api.assert_called_once_with(
'fakeVolId', 'fakeSnapshotName')
    def test_create_snapshot_without_volid(self):
        """Test create snapshot without a volume ID."""
fake_snapshot = fakes.SnapshotClass(10, None)
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.assertRaises(
exception.ShareResourceNotFound,
self.driver.create_snapshot,
context='context',
snapshot=fake_snapshot,
share_server=None)
def test_delete_snapshot(self):
"""Test delete snapshot with fakeSnapshot."""
fake_snapshot = fakes.SnapshotClass(
10, 'fakeShareName@fakeSnapshotName')
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.delete_snapshot_api.return_value = (
'fakeCreateShareId')
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeSnapshotId'
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver.delete_snapshot(
'context', fake_snapshot, share_server=None)
mock_api_return = mock_api_executor.return_value
mock_api_return.delete_snapshot_api.assert_called_once_with(
'fakeShareName@fakeSnapshotName')
def test_delete_snapshot_without_snapshot_id(self):
"""Test delete snapshot with fakeSnapshot and no snapshot id."""
fake_snapshot = fakes.SnapshotClass(10, None)
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver.delete_snapshot(
'context', fake_snapshot, share_server=None)
mock_private_storage.get.assert_called_once_with(
'fakeSnapshotId', 'snapshot_id')
@mock.patch.object(qnap.QnapShareDriver, '_get_location_path')
@mock.patch('manila.share.API')
@mock.patch.object(qnap.QnapShareDriver, '_gen_random_name')
def test_create_share_from_snapshot(
self,
mock_gen_random_name,
mock_share_api,
mock_get_location_path):
"""Test create share from snapshot."""
fake_snapshot = fakes.SnapshotClass(
10, 'fakeShareName@fakeSnapshotName')
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_gen_random_name.return_value = 'fakeShareName'
mock_api_executor.return_value.get_share_info.side_effect = [
None, self.get_share_info_return_value()]
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeSnapshotId'
mock_share_api.return_value.get.return_value = {'size': 10}
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver.create_share_from_snapshot(
'context', self.share, fake_snapshot, share_server=None)
mock_gen_random_name.assert_called_once_with(
'share')
mock_api_return = mock_api_executor.return_value
expected_call_list = [
mock.call('Storage Pool 1', vol_label='fakeShareName'),
mock.call('Storage Pool 1', vol_label='fakeShareName')]
self.assertEqual(
expected_call_list,
mock_api_return.get_share_info.call_args_list)
mock_api_return.clone_snapshot.assert_called_once_with(
'fakeShareName@fakeSnapshotName', 'fakeShareName')
@mock.patch.object(qnap.QnapShareDriver, '_get_location_path')
@mock.patch('manila.share.API')
@mock.patch.object(qnap.QnapShareDriver, '_gen_random_name')
def test_create_share_from_snapshot_diff_size(
self,
mock_gen_random_name,
mock_share_api,
            mock_get_location_path):
        """Test create share from snapshot with a different size."""
fake_snapshot = fakes.SnapshotClass(
10, 'fakeShareName@fakeSnapshotName')
mock_gen_random_name.return_value = 'fakeShareName'
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.side_effect = [
None, self.get_share_info_return_value()]
mock_private_storage = mock.Mock()
mock_private_storage.get.side_effect = [
'True',
'True',
'False',
'False',
'fakeVolName']
mock_share_api.return_value.get.return_value = {'size': 5}
mock_api_executor.return_value.edit_share.return_value = (
None)
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver.create_share_from_snapshot(
'context', self.share, fake_snapshot, share_server=None)
mock_gen_random_name.assert_called_once_with(
'share')
mock_api_return = mock_api_executor.return_value
expected_call_list = [
mock.call('Storage Pool 1', vol_label='fakeShareName'),
mock.call('Storage Pool 1', vol_label='fakeShareName')]
self.assertEqual(
expected_call_list,
mock_api_return.get_share_info.call_args_list)
mock_api_return.clone_snapshot.assert_called_once_with(
'fakeShareName@fakeSnapshotName', 'fakeShareName')
expect_share_dict = {
'sharename': 'fakeShareName',
'old_sharename': 'fakeShareName',
'new_size': 10,
'thin_provision': True,
'compression': True,
'deduplication': False,
'ssd_cache': False,
'share_proto': 'NFS'
}
mock_api_return.edit_share.assert_called_once_with(
expect_share_dict)
    def test_create_share_from_snapshot_without_snapshot_id(self):
        """Test create share from snapshot without a snapshot ID."""
fake_snapshot = fakes.SnapshotClass(10, None)
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.assertRaises(
exception.SnapshotResourceNotFound,
self.driver.create_share_from_snapshot,
context='context',
share=self.share,
snapshot=fake_snapshot,
share_server=None)
@mock.patch.object(qnap.QnapShareDriver, '_get_location_path')
@mock.patch('manila.share.API')
@mock.patch.object(qnap.QnapShareDriver, '_gen_random_name')
def test_create_share_from_snapshot_negative_name_exist(
self,
mock_gen_random_name,
mock_share_api,
            mock_get_location_path):
        """Test create share from snapshot when the share name already exists."""
fake_snapshot = fakes.SnapshotClass(
10, 'fakeShareName@fakeSnapshotName')
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_gen_random_name.return_value = 'fakeShareName'
mock_api_executor.return_value.get_share_info.return_value = (
self.get_share_info_return_value())
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeSnapshotId'
mock_share_api.return_value.get.return_value = {'size': 10}
self.mock_object(time, 'sleep')
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.assertRaises(
exception.ShareBackendException,
self.driver.create_share_from_snapshot,
context='context',
share=self.share,
snapshot=fake_snapshot,
share_server=None)
@mock.patch.object(qnap.QnapShareDriver, '_get_location_path')
@mock.patch('manila.share.API')
@mock.patch.object(qnap.QnapShareDriver, '_gen_random_name')
def test_create_share_from_snapshot_negative_clone_fail(
self,
mock_gen_random_name,
mock_share_api,
            mock_get_location_path):
        """Test create share from snapshot when cloning the snapshot fails."""
fake_snapshot = fakes.SnapshotClass(
10, 'fakeShareName@fakeSnapshotName')
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_gen_random_name.return_value = 'fakeShareName'
mock_api_executor.return_value.get_share_info.return_value = None
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeSnapshotId'
mock_share_api.return_value.get.return_value = {'size': 10}
self.mock_object(time, 'sleep')
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.assertRaises(
exception.ShareBackendException,
self.driver.create_share_from_snapshot,
context='context',
share=self.share,
snapshot=fake_snapshot,
share_server=None)
@mock.patch.object(qnap.QnapShareDriver, '_get_timestamp_from_vol_name')
@mock.patch.object(qnap.QnapShareDriver, '_allow_access')
@ddt.data('fakeHostName', 'fakeHostNameNotMatch')
def test_update_access_allow_access(
self, fakeHostName, mock_allow_access,
mock_get_timestamp_from_vol_name):
"""Test update access with allow access rules."""
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeVolName'
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_host_list.return_value = (
self.get_host_list_return_value())
mock_api_executor.return_value.set_nfs_access.return_value = None
mock_api_executor.return_value.delete_host.return_value = None
mock_allow_access.return_value = None
mock_get_timestamp_from_vol_name.return_value = fakeHostName
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver.update_access(
'context', self.share, 'access_rules',
None, None, share_server=None)
mock_api_executor.return_value.set_nfs_access.assert_called_once_with(
'fakeVolName', 2, 'all')
@mock.patch.object(qnap.QnapShareDriver, '_allow_access')
@mock.patch.object(qnap.QnapShareDriver, '_deny_access')
def test_update_access_deny_and_allow_access(
self,
mock_deny_access,
mock_allow_access):
"""Test update access with deny and allow access rules."""
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeVolName'
mock_deny_access.return_value = None
mock_allow_access.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
delete_rules = []
delete_rules.append('access1')
add_rules = []
add_rules.append('access1')
self.driver.update_access(
'context', self.share, None,
add_rules, delete_rules, share_server=None)
mock_deny_access.assert_called_once_with(
'context', self.share, 'access1', None)
mock_allow_access.assert_called_once_with(
'context', self.share, 'access1', None)
def test_update_access_without_volname(self):
"""Test update access without volName."""
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.assertRaises(
exception.ShareResourceNotFound,
self.driver.update_access,
context='context',
share=self.share,
access_rules='access_rules',
add_rules=None,
delete_rules=None,
share_server=None)
@mock.patch.object(qnap.QnapShareDriver, '_get_location_path')
def test_manage_existing_nfs(
self,
mock_get_location_path):
"""Test manage existing."""
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.return_value = (
self.get_share_info_return_value())
mock_private_storage = mock.Mock()
mock_private_storage.update.return_value = None
mock_private_storage.get.side_effect = [
'fakeVolId',
'fakeVolName']
mock_api_executor.return_value.get_specific_volinfo.return_value = (
self.get_specific_volinfo_return_value())
mock_api_executor.return_value.get_share_info.return_value = (
self.get_share_info_return_value())
mock_get_location_path.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.mock_object(share_types, 'get_extra_specs_from_share',
mock.Mock(return_value={}))
self.driver.manage_existing(self.share, 'driver_options')
mock_api_return = mock_api_executor.return_value
mock_api_return.get_share_info.assert_called_once_with(
'Storage Pool 1', vol_label='fakeShareName')
mock_api_return.get_specific_volinfo.assert_called_once_with(
'fakeNo')
mock_get_location_path.assert_called_once_with(
'fakeShareName', 'NFS', '1.2.3.4', 'fakeNo')
@mock.patch.object(qnap.QnapShareDriver, '_get_location_path')
    def test_manage_existing_nfs_negative_configuration(
self,
            mock_get_location_path):
        """Test manage existing with conflicting extra specs."""
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.return_value = (
self.get_share_info_return_value())
mock_private_storage = mock.Mock()
mock_private_storage.update.return_value = None
mock_private_storage.get.side_effect = [
'fakeVolId',
'fakeVolName']
mock_api_executor.return_value.get_specific_volinfo.return_value = (
self.get_specific_volinfo_return_value())
mock_api_executor.return_value.get_share_info.return_value = (
self.get_share_info_return_value())
mock_get_location_path.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.mock_object(share_types, 'get_extra_specs_from_share',
mock.Mock(return_value={
'dedupe': 'true',
'thin_provisioning': 'false'}))
self.assertRaises(
exception.InvalidExtraSpec,
self.driver.manage_existing,
share=self.share,
driver_options='driver_options')
    def test_manage_invalid_protocol(self):
        """Test manage existing with an invalid share protocol."""
share = fake_share.fake_share(
share_proto='fakeProtocol',
id='fakeId',
display_name='fakeDisplayName',
export_locations=[{'path': ''}],
host='QnapShareDriver',
size=10)
mock_private_storage = mock.Mock()
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.assertRaises(
exception.InvalidInput,
self.driver.manage_existing,
share=share,
driver_options='driver_options')
def test_manage_existing_nfs_without_export_locations(self):
share = fake_share.fake_share(
share_proto='NFS',
id='fakeId',
display_name='fakeDisplayName',
export_locations=[{'path': ''}],
host='QnapShareDriver',
size=10)
mock_private_storage = mock.Mock()
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.assertRaises(
exception.ShareBackendException,
self.driver.manage_existing,
share=share,
driver_options='driver_options')
@mock.patch.object(qnap.QnapShareDriver, '_get_location_path')
    def test_manage_existing_nfs_ip_not_equal_share_ip(
self,
            mock_get_location_path):
        """Test manage existing with an NFS IP not equal to the share IP."""
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.return_value = (
self.get_share_info_return_value())
mock_private_storage = mock.Mock()
mock_private_storage.update.return_value = None
mock_private_storage.get.side_effect = [
'fakeVolId',
'fakeVolName']
mock_api_executor.return_value.get_specific_volinfo.return_value = (
self.get_specific_volinfo_return_value())
mock_api_executor.return_value.get_share_info.return_value = (
self.get_share_info_return_value())
mock_get_location_path.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.1.1.1', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.assertRaises(
exception.ShareBackendException,
self.driver.manage_existing,
share=self.share,
driver_options='driver_options')
@mock.patch.object(qnap.QnapShareDriver, '_get_location_path')
def test_manage_existing_nfs_without_existing_share(
self,
mock_get_location_path):
"""Test manage existing nfs without existing share."""
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.return_value = (
self.get_share_info_return_value())
mock_private_storage = mock.Mock()
mock_private_storage.update.return_value = None
mock_private_storage.get.side_effect = [
'fakeVolId',
'fakeVolName']
mock_api_executor.return_value.get_specific_volinfo.return_value = (
self.get_specific_volinfo_return_value())
mock_api_executor.return_value.get_share_info.return_value = (
None)
mock_get_location_path.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.assertRaises(
exception.ManageInvalidShare,
self.driver.manage_existing,
share=self.share,
driver_options='driver_options')
def test_unmanage(self):
"""Test unmanage."""
mock_private_storage = mock.Mock()
mock_private_storage.delete.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver.unmanage(self.share)
mock_private_storage.delete.assert_called_once_with(
'shareId')
@mock.patch.object(qnap.QnapShareDriver, '_get_location_path')
def test_manage_existing_snapshot(
self,
            mock_get_location_path):
        """Test manage existing snapshot."""
fake_snapshot = fakes.SnapshotClass(
10, 'fakeShareName@fakeSnapshotName')
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.return_value = (
self.get_share_info_return_value())
mock_private_storage = mock.Mock()
mock_private_storage.update.return_value = None
mock_private_storage.get.side_effect = [
'fakeVolId', 'fakeVolName']
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver.manage_existing_snapshot(fake_snapshot, 'driver_options')
mock_api_return = mock_api_executor.return_value
mock_api_return.get_share_info.assert_called_once_with(
'Storage Pool 1', vol_no='fakeVolId')
fake_metadata = {
'snapshot_id': 'fakeShareName@fakeSnapshotName'}
mock_private_storage.update.assert_called_once_with(
'fakeSnapshotId', fake_metadata)
def test_unmanage_snapshot(self):
"""Test unmanage snapshot."""
fake_snapshot = fakes.SnapshotClass(
10, 'fakeShareName@fakeSnapshotName')
mock_private_storage = mock.Mock()
mock_private_storage.delete.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver.unmanage_snapshot(fake_snapshot)
mock_private_storage.delete.assert_called_once_with(
'fakeSnapshotId')
@ddt.data(
{'expect_result': 'manila-shr-fake_time', 'test_string': 'share'},
{'expect_result': 'manila-snp-fake_time', 'test_string': 'snapshot'},
{'expect_result': 'manila-hst-fake_time', 'test_string': 'host'},
{'expect_result': 'manila-fake_time', 'test_string': ''})
@ddt.unpack
@mock.patch('oslo_utils.timeutils.utcnow')
def test_gen_random_name(
self, mock_utcnow, expect_result, test_string):
"""Test gen random name."""
mock_private_storage = mock.Mock()
mock_utcnow.return_value.strftime.return_value = 'fake_time'
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.assertEqual(
expect_result, self.driver._gen_random_name(test_string))
def test_get_location_path(self):
"""Test get location path name."""
mock_private_storage = mock.Mock()
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_share_info.return_value = (
self.get_share_info_return_value())
mock_api_executor.return_value.get_specific_volinfo.return_value = (
self.get_specific_volinfo_return_value())
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
location = 'fakeIp:fakeMountPath'
expect_result = {
'path': location,
'is_admin_only': False,
}
self.assertEqual(
expect_result, self.driver._get_location_path(
'fakeShareName', 'NFS', 'fakeIp', 'fakeVolId'))
self.assertRaises(
exception.InvalidInput,
self.driver._get_location_path,
share_name='fakeShareName',
share_proto='fakeProto',
ip='fakeIp',
vol_id='fakeVolId')
def test_update_share_stats(self):
"""Test update share stats."""
mock_private_storage = mock.Mock()
mock_api_return = (
qnap.QnapShareDriver._create_api_executor.return_value)
mock_api_return.get_specific_poolinfo.return_value = (
self.get_specific_poolinfo_return_value())
mock_api_return.get_share_info.return_value = (
self.get_share_info_return_value())
mock_api_return.get_specific_volinfo.return_value = (
self.get_specific_volinfo_return_value())
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver._update_share_stats()
mock_api_return.get_specific_poolinfo.assert_called_once_with(
self.driver.configuration.qnap_poolname)
def test_get_vol_host(self):
"""Test get manila host IPV4s."""
mock_private_storage = mock.Mock()
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
expect_host_dict_ips = []
host_list = self.get_host_list_return_value()
for host in host_list:
host_dict = {
'index': host.find('index').text,
'hostid': host.find('hostid').text,
'name': host.find('name').text,
'ipv4': [host.find('netaddrs').find('ipv4').text]
}
expect_host_dict_ips.append(host_dict)
self.assertEqual(
expect_host_dict_ips, self.driver._get_vol_host(
host_list, 'fakeHostName'))
@mock.patch.object(qnap.QnapShareDriver, '_gen_host_name')
@mock.patch.object(qnap.QnapShareDriver, '_get_timestamp_from_vol_name')
@mock.patch.object(qnap.QnapShareDriver, '_check_share_access')
def test_allow_access_ro(
self,
mock_check_share_access,
mock_get_timestamp_from_vol_name,
mock_gen_host_name):
"""Test allow_access with access type ro."""
fake_access = fakes.AccessClass('fakeAccessType', 'ro', 'fakeIp')
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeVolName'
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_host_list.return_value = []
mock_get_timestamp_from_vol_name.return_value = 'fakeHostName'
mock_gen_host_name.return_value = 'manila-fakeHostName-ro'
mock_api_executor.return_value.add_host.return_value = None
mock_api_executor.return_value.set_nfs_access.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver._allow_access(
'context', self.share, fake_access, share_server=None)
mock_check_share_access.assert_called_once_with(
'NFS', 'fakeAccessType')
mock_api_executor.return_value.add_host.assert_called_once_with(
'manila-fakeHostName-ro', 'fakeIp')
@mock.patch.object(qnap.QnapShareDriver, '_gen_host_name')
@mock.patch.object(qnap.QnapShareDriver, '_get_timestamp_from_vol_name')
@mock.patch.object(qnap.QnapShareDriver, '_check_share_access')
def test_allow_access_ro_with_hostlist(
self,
mock_check_share_access,
mock_get_timestamp_from_vol_name,
mock_gen_host_name):
"""Test allow_access_ro_with_hostlist."""
host_dict_ips = []
for host in self.get_host_list_return_value():
if host.find('netaddrs/ipv4').text is not None:
host_dict = {
'index': host.find('index').text,
'hostid': host.find('hostid').text,
'name': host.find('name').text,
'ipv4': [host.find('netaddrs').find('ipv4').text]}
host_dict_ips.append(host_dict)
for host in host_dict_ips:
fake_access_to = host['ipv4']
fake_access = fakes.AccessClass(
'fakeAccessType', 'ro', fake_access_to)
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeVolName'
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_host_list.return_value = (
self.get_host_list_return_value())
mock_get_timestamp_from_vol_name.return_value = 'fakeHostName'
mock_gen_host_name.return_value = 'manila-fakeHostName'
mock_api_executor.return_value.set_nfs_access.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver._allow_access(
'context', self.share, fake_access, share_server=None)
mock_check_share_access.assert_called_once_with(
'NFS', 'fakeAccessType')
@mock.patch.object(qnap.QnapShareDriver, '_gen_host_name')
@mock.patch.object(qnap.QnapShareDriver, '_get_timestamp_from_vol_name')
@mock.patch.object(qnap.QnapShareDriver, '_check_share_access')
def test_allow_access_rw_with_hostlist_invalid_access(
self,
mock_check_share_access,
mock_get_timestamp_from_vol_name,
mock_gen_host_name):
"""Test allow_access_rw_invalid_access."""
host_dict_ips = []
for host in self.get_host_list_return_value():
if host.find('netaddrs/ipv4').text is not None:
host_dict = {
'index': host.find('index').text,
'hostid': host.find('hostid').text,
'name': host.find('name').text,
'ipv4': [host.find('netaddrs').find('ipv4').text]}
host_dict_ips.append(host_dict)
for host in host_dict_ips:
fake_access_to = host['ipv4']
fake_access = fakes.AccessClass(
'fakeAccessType', 'rw', fake_access_to)
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeVolName'
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_host_list.return_value = (
self.get_host_list_return_value())
mock_get_timestamp_from_vol_name.return_value = 'fakeHostName'
mock_gen_host_name.return_value = 'manila-fakeHostName-rw'
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.assertRaises(
exception.InvalidShareAccess,
self.driver._allow_access,
context='context',
share=self.share,
access=fake_access,
share_server=None)
@mock.patch.object(qnap.QnapShareDriver, '_get_timestamp_from_vol_name')
@mock.patch.object(qnap.QnapShareDriver, '_check_share_access')
def test_allow_access_rw(
self,
mock_check_share_access,
mock_get_timestamp_from_vol_name):
"""Test allow_access with access type rw."""
fake_access = fakes.AccessClass('fakeAccessType', 'rw', 'fakeIp')
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeVolName'
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_host_list.return_value = []
mock_get_timestamp_from_vol_name.return_value = 'fakeHostName'
mock_api_executor.return_value.add_host.return_value = None
mock_api_executor.return_value.set_nfs_access.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver._allow_access(
'context', self.share, fake_access, share_server=None)
mock_check_share_access.assert_called_once_with(
'NFS', 'fakeAccessType')
mock_api_executor.return_value.add_host.assert_called_once_with(
'manila-fakeHostName-rw', 'fakeIp')
@mock.patch.object(qnap.QnapShareDriver, '_gen_host_name')
@mock.patch.object(qnap.QnapShareDriver, '_check_share_access')
def test_allow_access_ro_without_hostlist(
self,
mock_check_share_access,
mock_gen_host_name):
"""Test allow access without host list."""
fake_access = fakes.AccessClass('fakeAccessType', 'ro', 'fakeIp')
mock_private_storage = mock.Mock()
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_host_list.return_value = None
mock_gen_host_name.return_value = 'fakeHostName'
mock_api_executor.return_value.add_host.return_value = None
mock_api_executor.return_value.set_nfs_access.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
share_name = self.driver._gen_random_name('share')
mock_private_storage.get.return_value = share_name
self.driver._allow_access(
'context', self.share, fake_access, share_server=None)
mock_check_share_access.assert_called_once_with(
'NFS', 'fakeAccessType')
mock_api_executor.return_value.add_host.assert_called_once_with(
'fakeHostName', 'fakeIp')
@mock.patch.object(qnap.QnapShareDriver, '_get_vol_host')
@mock.patch.object(qnap.QnapShareDriver, '_gen_host_name')
@mock.patch.object(qnap.QnapShareDriver, '_get_timestamp_from_vol_name')
@mock.patch.object(qnap.QnapShareDriver, '_check_share_access')
def test_deny_access_with_hostlist(
self,
mock_check_share_access,
mock_get_timestamp_from_vol_name,
mock_gen_host_name,
mock_get_vol_host):
"""Test deny access."""
host_dict_ips = []
for host in self.get_host_list_return_value():
if host.find('netaddrs/ipv4').text is not None:
host_dict = {
'index': host.find('index').text,
'hostid': host.find('hostid').text,
'name': host.find('name').text,
'ipv4': [host.find('netaddrs').find('ipv4').text]}
host_dict_ips.append(host_dict)
for host in host_dict_ips:
fake_access_to = host['ipv4'][0]
fake_access = fakes.AccessClass('fakeAccessType', 'ro', fake_access_to)
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'vol_name'
mock_api_return = (
qnap.QnapShareDriver._create_api_executor.return_value)
mock_api_return.get_host_list.return_value = (
self.get_host_list_return_value())
mock_get_timestamp_from_vol_name.return_value = 'fakeTimeStamp'
mock_gen_host_name.return_value = 'manila-fakeHostName'
mock_get_vol_host.return_value = host_dict_ips
mock_api_return.add_host.return_value = None
mock_api_return.set_nfs_access.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver._deny_access(
'context', self.share, fake_access, share_server=None)
mock_check_share_access.assert_called_once_with(
'NFS', 'fakeAccessType')
@mock.patch.object(qnap.QnapShareDriver, '_get_timestamp_from_vol_name')
@mock.patch.object(qnap.QnapShareDriver, '_check_share_access')
    def test_deny_access_with_hostlist_not_equal_access_to(
self,
mock_check_share_access,
            mock_get_timestamp_from_vol_name):
        """Test deny access when no host in the host list matches access_to."""
fake_access = fakes.AccessClass('fakeAccessType', 'ro', 'fakeIp')
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'vol_name'
mock_api_return = (
qnap.QnapShareDriver._create_api_executor.return_value)
mock_api_return.get_host_list.return_value = (
self.get_host_list_return_value())
mock_api_return.add_host.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver._deny_access(
'context', self.share, fake_access, share_server=None)
mock_check_share_access.assert_called_once_with(
'NFS', 'fakeAccessType')
@mock.patch.object(qnap.QnapShareDriver, '_get_timestamp_from_vol_name')
@mock.patch.object(qnap.QnapShareDriver, '_check_share_access')
def test_deny_access_without_hostlist(
self,
mock_check_share_access,
mock_get_timestamp_from_vol_name):
"""Test deny access without hostlist."""
fake_access = fakes.AccessClass('fakeAccessType', 'ro', 'fakeIp')
mock_private_storage = mock.Mock()
mock_private_storage.get.return_value = 'fakeVolName'
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_host_list.return_value = None
mock_get_timestamp_from_vol_name.return_value = 'fakeHostName'
mock_api_executor.return_value.add_host.return_value = None
mock_api_executor.return_value.set_nfs_access.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.driver._deny_access(
'context', self.share, fake_access, share_server=None)
mock_check_share_access.assert_called_once_with(
'NFS', 'fakeAccessType')
@ddt.data('NFS', 'CIFS', 'proto')
def test_check_share_access(self, test_proto):
"""Test check_share_access."""
mock_private_storage = mock.Mock()
mock_api_executor = qnap.QnapShareDriver._create_api_executor
mock_api_executor.return_value.get_host_list.return_value = None
mock_api_executor.return_value.add_host.return_value = None
mock_api_executor.return_value.set_nfs_access.return_value = None
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', 'Storage Pool 1',
private_storage=mock_private_storage)
self.assertRaises(
exception.InvalidShareAccess,
self.driver._check_share_access,
share_proto=test_proto,
access_type='notser')
def test_get_ts_model_pool_id(self):
"""Test get ts model pool id."""
mock_private_storage = mock.Mock()
self._do_setup('http://1.2.3.4:8080', '1.2.3.4', 'admin',
'qnapadmin', '1',
private_storage=mock_private_storage)
self.assertEqual('1', self.driver._get_ts_model_pool_id('1'))
| {
"content_hash": "734750a3d280ea2420265b859b29c150",
"timestamp": "",
"source": "github",
"line_count": 1510,
"max_line_length": 79,
"avg_line_length": 42.4158940397351,
"alnum_prop": 0.5932113414938796,
"repo_name": "bswartz/manila",
"id": "8e5974a718c3b18b169cbd19696a4fb4e91b7ce8",
"size": "64687",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manila/tests/share/drivers/qnap/test_qnap.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "953"
},
{
"name": "Python",
"bytes": "9952105"
},
{
"name": "Shell",
"bytes": "106606"
}
],
"symlink_target": ""
} |
import json
import socket
import atexit
import datetime
import time
import re
import random
import psycopg2
from flask import Flask, Response
from flask_limiter import Limiter
from flask import render_template, request, g, session, send_from_directory, jsonify
app = Flask(__name__)
limiter = Limiter(app, global_limits=["500 per minute"])
#
# TODO: take into consideration db restarts. as a result
# db connections might need to be re-established.
#
# testing: seq 1 1000 | xargs -I% curl "127.0.0.1:8082/search?query=php & perl"
#
#
global dbconn
dbconn = psycopg2.connect("dbname=news user=user password=test1 host=127.0.0.1 application_name='news-web'")
with open('sql/search_simple.sql', 'r') as f:
query_search = f.read()
with open('sql/get_city_events.sql', 'r') as f:
query_city_events = f.read()
def db_close():
dbconn.close()
atexit.register(db_close)
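# Illustrative sketch for the TODO above (re-establishing dropped connections):
# a hypothetical helper, not called anywhere below; the helper name and the
# reconnect policy are assumptions, not part of the original application.
def _reconnect_if_closed():
    global dbconn
    if dbconn.closed:
        dbconn = psycopg2.connect("dbname=news user=user password=test1 "
                                  "host=127.0.0.1 application_name='news-web'")
    return dbconn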
@app.route("/")
def index():
return render_template('base.html')
# using a more familiar query syntax
#
# Examples:
# * AND query (& in Pg) : kw1 kw2
# * AND NOT query(&! in Pg): kw1 -kw2
#
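# For example, with the rules above:
#   "php perl"   ->  "php & perl"
#   "php -perl"  ->  "php & !perl"
#   "php"        ->  "php"
#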
def convert_query(q):
q1 = ""
es = re.split('\s+',q)
print "kw: ", es
if len(es) > 1:
q1 = es[0]
for ei in range(1,len(es)):
e = es[ei]
m = re.search('^-(.*)$', e)
if m:
q1 += ' & !' + m.group(1)
else:
q1 += ' & ' + e
else:
q1 = q
print "pg query: ",q1
return q1
@app.route("/search", methods=['GET'])
@limiter.limit("300 per minute")
def search_blogs():
global dbconn
    # Note: basically a-zA-Z0-9, spaces, comma, semi-colon, underscore, &, | and ! would be enough
    # stdlib re has no \p{Latin}; whitelist the characters described in the note above
    # (keeping '-' as well, since convert_query uses it for negation)
    re_filter = u"[^a-z0-9 ,;_&|!-]+"
re_char_blist = u"['\"&\!]+"
import urllib
query_ = request.args.get('query','').lower()
query_ = re.sub(re_filter,'', query_)
query_ = re.sub(re_char_blist,' ', query_)
print "keywords: ",query_
query_ = convert_query(query_)
results = []
with dbconn:
with dbconn.cursor() as curs:
curs.execute(query_search, {'fts_query': query_})
results = curs.fetchall()
# build response
j_results = {'results': []}
for r in results:
j_results['results'].append(r)
resp = Response(json.dumps(j_results), status=200, mimetype='application/json')
# resp.headers['Access-Control-Allow-Origin'] = '*'
return resp
@app.route("/geoactive", methods=['GET'])
@limiter.limit("300 per minute")
def geoactive():
results = []
with dbconn:
with dbconn.cursor() as curs:
curs.execute('SELECT * FROM mv_active_cities;')
res = curs.fetchall()
for r in res:
results.append({
'city_name': r[0],
'lat': r[1],
'lng': r[2],
'id': r[3]
})
resp = Response(json.dumps(results), status=200, mimetype='application/json')
# resp.headers['Access-Control-Allow-Origin'] = '*'
return resp
@app.route("/get_city_events", methods=['GET'])
@limiter.limit("300 per minute")
def get_city_events():
results = []
city_id = request.args.get('id','').lower()
city_id = re.sub(r'[^0-9]','', city_id)
with dbconn:
with dbconn.cursor() as curs:
curs.execute(query_city_events, {'geonameid': city_id})
res = curs.fetchall()
for r in res:
results.append({
'name': r[0],
'url': r[1],
'start_time': r[2]
})
resp = Response(json.dumps(results), status=200, mimetype='application/json')
# resp.headers['Access-Control-Allow-Origin'] = '*'
return resp
if __name__ == "__main__":
# running in non-WSGI context
app.run(host="0.0.0.0", port=5000, debug=False)
| {
"content_hash": "0c9a526ca20f004310126a5a8b49a6c0",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 108,
"avg_line_length": 26.643835616438356,
"alnum_prop": 0.5604113110539846,
"repo_name": "wsdookadr/news",
"id": "15ad8d78a717ffcb4d88699c96e9ab1b1b31a738",
"size": "3912",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "3"
},
{
"name": "CSS",
"bytes": "3976"
},
{
"name": "HTML",
"bytes": "11233"
},
{
"name": "JavaScript",
"bytes": "3257"
},
{
"name": "PLpgSQL",
"bytes": "15803"
},
{
"name": "Python",
"bytes": "47955"
},
{
"name": "Shell",
"bytes": "5116"
},
{
"name": "XQuery",
"bytes": "3091"
}
],
"symlink_target": ""
} |
from . import PGui
import weakref
class PControllerBlock(PGui):
_PControllerBlockClass = None
def __init__(self):
self._pControllerBlock = self._PControllerBlockClass(self)
self._workermanager = None
self._pwindow = None
self._workerid = None
self._block = None
def set_workermanager(self, workermanager):
self._workermanager = weakref.ref(workermanager)
def set_workerinstancemanager(self, workerinstancemanager):
self._wim = weakref.ref(workerinstancemanager)
def set_pwindow(self, pwindow):
self._pwindow = weakref.ref(pwindow)
def show(self):
self._pControllerBlock.show()
def hide(self):
self._pControllerBlock.hide()
self._workerid = None
def load_paramset(self, workerid):
manager = self._workermanager()
block = manager.get_block(workerid)
self._block = block
blockvalues = None
try:
instance = self._wim().get_workerinstance(workerid)
blockvalues = instance.curr_blockvalues
except KeyError:
pass
self._workerid = workerid
cont = self._pControllerBlock
if self._block is None:
cont.set_blocktype(None)
cont.set_blockstrings(None)
cont.set_blockvalues(None)
else:
cont.set_blocktype(self._block.spydertype)
cont.set_blockstrings(self._block.tree.keys())
cont.set_blockvalues(blockvalues)
def update_paramvalues(self, blockvalues):
cont = self._pControllerBlock
cont.set_blockvalues(blockvalues)
def set_paramvalues(self, blockvalues):
ok = self._update_blockvalues(blockvalues)
assert ok is True
self.update_paramvalues(blockvalues)
self._wim().worker_update_blockvalues(self._workerid, blockvalues)
def gui_updates_blockvalues(self, blockvalues):
ok = self._update_blockvalues(blockvalues)
if ok:
self._wim().worker_update_blockvalues(self._workerid, blockvalues)
return ok
def _update_blockvalues(self, blockvalues):
if self._block is None:
assert blockvalues is None or blockvalues == [""] * len(blockvalues)
return
blockstrings = self._block.tree.keys()
workerid = self._workerid
io = self._block.io
        # Check that the names in the new blockvalues are valid block strings
        for s in blockvalues:
            if s != "" and s not in blockstrings: return False
#Make sure that you can't remove block antennas/outputs with connections
instance = self._wim().get_workerinstance(workerid)
old_blockvalues = instance.curr_blockvalues
if old_blockvalues is not None:
newvalues = list(blockvalues)
for n in range(len(newvalues), len(old_blockvalues)):
newvalues.append(None)
has_connection = self._wim().has_connection
for old, new in zip(old_blockvalues, newvalues):
if not old: continue
if not new or new != old:
if has_connection(workerid, io, old):
self.update_paramvalues(old_blockvalues)
return False
return True
def p(self):
return self._pControllerBlock
| {
"content_hash": "613c4b6506ca90d6e4c9a8685d25c78f",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 80,
"avg_line_length": 33.63,
"alnum_prop": 0.6122509663990485,
"repo_name": "agoose77/hivesystem",
"id": "ec02b7a788b19e87821b16640430bdf5aeef0555",
"size": "3363",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hiveguilib/PGui/PControllerBlock.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "2491478"
},
{
"name": "Shell",
"bytes": "1164"
}
],
"symlink_target": ""
} |
from sys import argv
script, filename = argv
txt = open(filename)
print("Here's your file {!r}:".format(filename))
print(txt.read())
print("Type the filename again:")
file_again = input("> ")
txt_again = open(file_again)
print(txt_again.read())
txt.close()
txt_again.close()
| {
"content_hash": "c7992da89ecc29557f5c9ccb489af7fb",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 48,
"avg_line_length": 15.666666666666666,
"alnum_prop": 0.6843971631205674,
"repo_name": "alexliew/learn_python_the_hard_way",
"id": "bbb71393e246ae0dbb796c6c212389593813b488",
"size": "282",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ex15.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "64890"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(self, plotly_name="size", parent_name="surface.contours.x", **kwargs):
super(SizeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
min=kwargs.pop("min", 0),
**kwargs,
)
| {
"content_hash": "8f547109a7ea9048e97b6ed20b76c84d",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 87,
"avg_line_length": 36.416666666666664,
"alnum_prop": 0.6018306636155606,
"repo_name": "plotly/plotly.py",
"id": "2cee0c21065438cbd28c6e2cce9f08a306be8452",
"size": "437",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/surface/contours/x/_size.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
import time
EVENT_TYPES={
"dummy":"ps:testing:dummy"
}
class Probe:
"""
Dummy probe that just sleeps and returns 1
"""
def __init__(self, service, measurement):
self.config = measurement["configuration"]
self.duration = self.config.get("schedule_params", {}).get("duration", 0)
def get_data(self):
time.sleep(self.duration)
return {EVENT_TYPES["dummy"]: 1}
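# Illustrative measurement dict (added for clarity; the values are invented). The probe
# only reads configuration.schedule_params.duration, so a 5-second dummy reading is:
#   probe = Probe(service=None, measurement={
#       "configuration": {"schedule_params": {"duration": 5}}})
#   probe.get_data()   # sleeps 5 seconds, then returns {"ps:testing:dummy": 1}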
| {
"content_hash": "1f4e50d86f55ede40cabf87ed5a91212",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 81,
"avg_line_length": 22,
"alnum_prop": 0.6100478468899522,
"repo_name": "periscope-ps/blipp",
"id": "943b3f02b5809012fae0f1ddb09bba60bd0f3709",
"size": "932",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "blipp/dummy.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "207513"
},
{
"name": "Shell",
"bytes": "5186"
}
],
"symlink_target": ""
} |
import numpy as np
import random
class Agent(object):
""" Generic Reinforcement learner. Will use Q learning, dyna Q, and some custom additions.
(Initially based on the template for the Machine Learning for Trading course, by Tucker Balch)"""
def __init__(self,
num_states,
num_actions,
alpha=0.2,
gamma=0.9,
random_actions_rate=0.9,
random_actions_decrease=0.999,
dyna_iterations=0,
verbose=False,
name='NN'):
self.verbose = verbose
# Dimensions of the problem
self.num_states = num_states
self.num_actions = num_actions
# Parameters
self.alpha = alpha
self.gamma = gamma
self.random_actions_rate = random_actions_rate
self.random_actions_decrease = random_actions_decrease
self.dyna_iterations = dyna_iterations
# Initialization
self.s = 0
self.a = 0
self.Q = 1.0 - 2*np.random.rand(num_states, num_actions)
# QExplore keeps track of how many times the (s,a) pair was visited (with Q update)
self.QExplore = np.ones((num_states, num_actions))
# T and R for the hallucination models
# Probabilities of transition
self.T = 0.00001*np.ones((num_states, num_actions, num_states))
# Expected immediate reward
self.R = 1.0 - 2*np.random.rand(num_states, num_actions)
self.name = name
def random_action(self, s, actions=None):
"""
This function chooses a random action, but not uniformly.
It addresses the problem that a totally random exploration is very slow.
So it keeps track of the explored (state,action) pairs and looks for new things to do.
:param s: the current state
:param actions: A list of possible actions
:return: action
"""
if actions is None:
actions = range(self.num_actions)
probas = 1/self.QExplore[s, actions]
# Normalize
probas /= np.sum(probas)
action = np.random.choice(actions, p=probas)
# action = random.randint(0, self.num_actions-1)
return action
def choose_action(self, s):
"""
Chooses an action. With "random_actions_rate" probability it returns a random action.
If it doesn't, then it returns the best option from the Q table.
        It doesn't update the Q table nor the random_actions_rate variable.
:param s: is the current state
:return: action
"""
do_explore = (random.random() < self.random_actions_rate)
if do_explore:
action = self.random_action(s)
else:
actions = range(self.num_actions)
max_q = np.max(self.Q[s])
# Now, get all the actions that have Q == maxQ
optimal_actions = []
for action_temp in actions:
if self.Q[s, action_temp] == max_q:
optimal_actions.append(action_temp)
# Choose one of the optimal choices, at random
# (I could use the QExplore to choose also...)
action = random.choice(optimal_actions)
return action
def hallucinate(self, s):
# Initialize the hallucinating states and actions (the real ones shouldn't change)
# Should hallucinations be more random?? To test later...
        # h_random_actions_rate = self.random_actions_rate
h_s = s
for i in range(self.dyna_iterations):
# Get new action
h_a = self.choose_action(h_s)
# Simulate transitions and rewards
transition_probas = self.T[h_s, h_a, :]/np.sum(self.T[h_s, h_a, :])
h_s_prime = np.random.choice(range(self.num_states), p=transition_probas)
h_r = self.R[h_s, h_a]
# Update Q
# Get the best Q for h_s'
max_q_prime = np.max(self.Q[h_s_prime])
# Now use the formula to update Q
self.Q[h_s, h_a] = (1-self.alpha)*self.Q[h_s, h_a] + \
self.alpha*(h_r + self.gamma * max_q_prime)
# Update the state
h_s = h_s_prime
def play_learned_response(self, new_state):
"""
This function does the same as "play", but without updating the Q table. Given a new state, it chooses an action
according to the best learned policy, so far.
It does update the state.
:param new_state: The resulting state for the previous action, or the state that was externally set.
:returns: The chosen action
"""
# Choose an action
action = self.choose_action(new_state)
# Update the state and action
self.s = new_state
self.a = action
if self.verbose:
print("s =", new_state, "a =", action)
return action
def play(self, reward, new_state):
"""
Given a new state, and a reward for the previous action,
chooses an action, updating the Q table in the process.
:param new_state: The resulting state for the previous action.
:param reward: The reward for the previous action.
:returns: The chosen action.
"""
# Update Q ------------------------------------------
# Get the best Q for s'
maxQprime = np.max(self.Q[new_state])
# Now use the formula to update Q
self.Q[self.s, self.a] = (1-self.alpha)*self.Q[self.s, self.a] + \
self.alpha*(reward + self.gamma * maxQprime)
# Hallucinate some experience...
# Update T
self.T[self.s, self.a, new_state] += 1
# Update R
self.R[self.s, self.a] = (1-self.alpha)*self.R[self.s, self.a] + self.alpha * reward
# Hallucinate!
self.hallucinate(new_state)
# End of Update Q -----------------------------------
# Choose an action and then update random_action_rate (!)
action = self.choose_action(new_state)
self.random_actions_rate *= self.random_actions_decrease
# Update the state and action
self.s = new_state
self.a = action
# Update QExplore
self.QExplore[new_state, action] += 1.0
# Print some debugging messages
if self.verbose:
print("s = {} a = {} reward = {}".format(new_state, action, reward))
return action
def __str__(self):
return self.name
__repr__ = __str__
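# --- Minimal usage sketch (added for illustration; the toy environment below is invented) ---
# The agent is driven externally: the caller maps its environment onto a discrete state
# index and feeds back the reward earned by the previous action.
if __name__ == "__main__":
    agent = Agent(num_states=10, num_actions=4, dyna_iterations=5)
    state = 0
    action = agent.play_learned_response(state)  # first move, no reward available yet
    for _ in range(100):
        # Toy transition and reward, standing in for a real environment step.
        state = (state + action) % 10
        reward = 1.0 if state == 0 else -0.1
        action = agent.play(reward, state)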
| {
"content_hash": "97967ec3b4350044bf15d9acfd93a0bb",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 120,
"avg_line_length": 34.909574468085104,
"alnum_prop": 0.5663568490019808,
"repo_name": "mtasende/Machine-Learning-Nanodegree-Capstone",
"id": "dbfe19bb115a1d3056d3f43014c623ca50b9ba0b",
"size": "6563",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "recommender/agent.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "30219877"
},
{
"name": "Jupyter Notebook",
"bytes": "27049321"
},
{
"name": "Python",
"bytes": "92465"
}
],
"symlink_target": ""
} |
NOTIFICATIONS_MAPPING = {
'notifications_get': {
'resource': 'notifications.json',
'docs': ('http://docs.discourse.org/#tag/'
'Notifications%2Fpaths%2F~1notifications.json%2Fget'),
'methods': ['GET'],
},
}
| {
"content_hash": "b5b193320cf63240740ea8b69f32d1b2",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 71,
"avg_line_length": 31.875,
"alnum_prop": 0.5607843137254902,
"repo_name": "humrochagf/tapioca-discourse",
"id": "d1845796686fc9299f212635c6fbef2aad969df8",
"size": "280",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tapioca_discourse/resource_mapping/notifications.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1539"
},
{
"name": "Python",
"bytes": "27888"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from celery import current_task
from time import sleep
from datetime import datetime
from celeryproj.celery import celery
from formdata.utils.filing_body_processor import process_filing_body
from formdata.utils.dump_utils import dump_filing_sked, dump_committee_sked, dump_candidate_sked
import sys
import os
import redis
sys.path.append('../../')
from fecreader.settings import CUSTOM_DOWNLOAD_DIR, CUSTOM_DOWNLOAD_URL
CACHE_TTL = 3600
CACHE_DB = 'files'
r = redis.StrictRedis(host='localhost', port=6379, db=CACHE_DB)
def set_cachekey(key, value):
r.set(key, value)
r.expire(key, CACHE_TTL)
def gzip_file(destination_file):
gzip_cmd = "gzip -f %s" % (destination_file)
filename_zipped = destination_file + ".gz"
proc = os.system(gzip_cmd)
return filename_zipped
@celery.task
def dump_filing_sked_celery(sked_name, filing_number):
cache_key = "%s_sked%s" % (filing_number, sked_name)
result = r.get(cache_key)
if result:
print "file cache hit: " + cache_key
return result
print "file cache miss: " + cache_key
this_request_id = str(dump_filing_sked_celery.request.id)
this_request_id = this_request_id.replace("-", "")
filename = "filing%ssked%s_%s.csv" % (filing_number, sked_name, this_request_id)
destination_file = CUSTOM_DOWNLOAD_DIR + "/" + filename
destination_url = CUSTOM_DOWNLOAD_URL + "/" + filename
dump_filing_sked(sked_name, filing_number, destination_file)
gzip_file(destination_file)
destination_url = destination_url + ".gz"
set_cachekey(cache_key, destination_url)
return destination_url
@celery.task
def dump_committee_sked_celery(cycle, sked_name, committee_number):
cache_key = "%s_sked%s" % (committee_number, sked_name)
result = r.get(cache_key)
if result:
print "file cache hit: " + cache_key
return result
this_request_id = dump_committee_sked_celery.request.id
this_request_id = this_request_id.replace("-", "")
filename = "%ssked%s_%s_%s.csv" % (committee_number, sked_name, cycle, this_request_id)
destination_file = CUSTOM_DOWNLOAD_DIR + "/" + filename
destination_url = CUSTOM_DOWNLOAD_URL + "/" + filename
dump_committee_sked(cycle, sked_name, committee_number, destination_file)
gzip_file(destination_file)
destination_url = destination_url + ".gz"
set_cachekey(cache_key, destination_url)
return destination_url
@celery.task
def dump_candidate_sked_celery(cycle, sked_name, candidate_id):
cache_key = "%s_sked%s" % (candidate_id, sked_name)
result = r.get(cache_key)
if result:
print "file cache hit: " + cache_key
return result
this_request_id = dump_candidate_sked_celery.request.id
this_request_id = this_request_id.replace("-", "")
filename = "%ssked%s_%s_%s.csv" % (candidate_id, sked_name, cycle, this_request_id)
destination_file = CUSTOM_DOWNLOAD_DIR + "/" + filename
destination_url = CUSTOM_DOWNLOAD_URL + "/" + filename
dump_candidate_sked(cycle, sked_name, candidate_id, destination_file)
gzip_file(destination_file)
destination_url = destination_url + ".gz"
set_cachekey(cache_key, destination_url)
return destination_url
@celery.task
def process_filing_body_celery(filingnum):
process_filing_body(filingnum)
@celery.task
def add(x, sked):
print('Executing task id %r, args: %r kwargs: %r' % (
add.request.id, add.request.args, add.request.kwargs))
print('sleeping for 10 seconds')
sleep(10)
return x + 1
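# Illustrative call sites (added for clarity; the filing number is an invented example).
# These tasks are queued asynchronously, e.g. from the web layer, and the cached download
# URL is reused until the redis key expires after CACHE_TTL seconds:
#   dump_filing_sked_celery.delay('A', 123456)      # returns an AsyncResult
#   process_filing_body_celery.delay(123456)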
| {
"content_hash": "0ff34dac4e40d137e001ba2dd545a871",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 96,
"avg_line_length": 34.08571428571429,
"alnum_prop": 0.6851075719474714,
"repo_name": "sunlightlabs/read_FEC",
"id": "5fdd4149826a0db7b65f92dc3cfd602542db325a",
"size": "3579",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fecreader/celeryproj/tasks.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "27432"
},
{
"name": "HTML",
"bytes": "357960"
},
{
"name": "JavaScript",
"bytes": "129989"
},
{
"name": "Python",
"bytes": "1881514"
},
{
"name": "Shell",
"bytes": "10604"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Windows.Forms;
namespace SeriesNamer
{
static class Program
{
/// <summary>
/// The main entry point for the application.
/// </summary>
[STAThread]
static void Main()
{
Application.EnableVisualStyles();
Application.SetCompatibleTextRenderingDefault(false);
Application.Run(new Main());
}
}
}
| {
"content_hash": "afe817bac6a09a0a23810af4113b00e7",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 65,
"avg_line_length": 22.80952380952381,
"alnum_prop": 0.5970772442588727,
"repo_name": "madeso/prettygood",
"id": "fa249185d3186620b33827dc5e2897d3e9c44ddd",
"size": "481",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dotnet/SeriesNamer/Program.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "384285"
}
],
"symlink_target": ""
} |
"""
conpaas.core.clouds.dummy
=========================
ConPaaS core: Dummy cloud IaaS code.
:copyright: (C) 2010-2013 by Contrail Consortium.
"""
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from .base import Cloud
class DummyCloud(Cloud):
'''Support for "dummy" clouds'''
def __init__(self, cloud_name, iaas_config):
Cloud.__init__(self, cloud_name)
def get_cloud_type(self):
return 'dummy'
def _connect(self):
'''Connect to dummy cloud'''
DummyDriver = get_driver(Provider.DUMMY)
self.driver = DummyDriver(0)
self.connected = True
def config(self, config_params={}, context=None):
if context is not None:
self.cx = context
def new_instances(self, count, name='conpaas', inst_type=None):
if not self.connected:
self._connect()
return [self._create_service_nodes(self.driver.create_node(), False)
for _ in range(count)]
def kill_instance(self, node):
'''Kill a VM instance.
@param node: A ServiceNode instance, where node.id is the vm_id
'''
if self.connected is False:
raise Exception('Not connected to cloud')
# destroy_node does not work properly in libcloud's dummy
# driver. Just return True.
return True
| {
"content_hash": "3f12eba6d63cb1b38635a9477fd8176d",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 76,
"avg_line_length": 26.903846153846153,
"alnum_prop": 0.6047176554681916,
"repo_name": "mihaisoloi/conpaas",
"id": "1ad65e70cb729c0b6bae4d9b641ba7fd9ef04d10",
"size": "1424",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conpaas-services/src/conpaas/core/clouds/dummy.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "399657"
},
{
"name": "JavaScript",
"bytes": "105439"
},
{
"name": "PHP",
"bytes": "1811901"
},
{
"name": "Python",
"bytes": "2047262"
},
{
"name": "Shell",
"bytes": "136501"
}
],
"symlink_target": ""
} |
"""A set of Python 3 function annotation tools supporting Python 2.4 and up.
* Non-declarative syntax. (Decorators or annotations.)
* No i18n.
* Zero docstrings; comments in Chinese.
* Single monolithic file.
* README is only documentation.
* Estimates: 310 SLoC…
"""
from __future__ import unicode_literals
from boxmongodb import boxmongodb
# From the readme. (Copy/pasted. Really.)
class mongoMyAnBox_counter(boxmongodb.Model):
username = boxmongodb.StringProperty()
num = boxmongodb.IntegerProperty(default="1")
def main():
a = "CcdjhMarx"
b = "1984"
m = mongoMyAnBox_counter(username=a,num=b)
m.insert()
# Provides
[ # all support 'default'
'boxmongodb.StringProperty',
'boxmongodb.DateTimeProperty', # auto_now
'boxmongodb.IntegerProperty',
'boxmongodb.LinkProperty',
'boxmongodb.AuthProperty', # never, ever use this (defaults to unixts+sample(10)+sample(10) alnum!)
'boxmongodb.DictProperty',
'boxmongodb.EmailProperty'
]
| {
"content_hash": "867edf6d97983c815da64f059e635ccc",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 101,
"avg_line_length": 24.275,
"alnum_prop": 0.729145211122554,
"repo_name": "marrow/schema",
"id": "f37d5d912a05bb8111d0e05869c9c652723b32c8",
"size": "992",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "example/thirdparty/boxmongodb.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "655"
},
{
"name": "Python",
"bytes": "108078"
},
{
"name": "Shell",
"bytes": "299"
}
],
"symlink_target": ""
} |
import glob, os, sys
from collections import Counter
def load_files(folder):
"""Load all text files in folder and return character histogram data and
    files containing blocked characters."""
print("Working on %s" % folder)
# List files containing these chars
blockchars = "©£€»þ"
blockfiles = {}
for char in blockchars:
blockfiles[char] = []
# count chars in txt files
c = Counter()
for file in glob.glob(folder + "/*/*.txt"):
with open(file) as f:
for line in f:
c += Counter(line)
for char in line:
if char in blockchars:
blockfiles[char].append(file)
return c, blockfiles
if __name__ == "__main__":
c, blockfiles = load_files(sys.argv[1])
print("Total chars %s" % sum(c.values()))
for char, count in c.most_common():
print(char, count)
for item in blockfiles:
print(item)
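# Example invocation (added for clarity; the path is a placeholder). The argument must
# contain one level of subdirectories holding .txt files, matching the "/*/*.txt" glob:
#   python char-histogram.py /path/to/text-folders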
| {
"content_hash": "c17e6177eaa1fb5a2e51c465fa73b4fb",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 76,
"avg_line_length": 23.5609756097561,
"alnum_prop": 0.567287784679089,
"repo_name": "Kungbib/CIPAC",
"id": "c15bf7cdad52a9bf4ec8ad5614d5f9ee35859de5",
"size": "1052",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/char-histogram.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3932"
},
{
"name": "HTML",
"bytes": "171920"
},
{
"name": "JavaScript",
"bytes": "564"
},
{
"name": "Perl",
"bytes": "4929"
},
{
"name": "Python",
"bytes": "158144"
},
{
"name": "Shell",
"bytes": "5787"
}
],
"symlink_target": ""
} |
from tempest.api.volume import base
from tempest.common.utils import data_utils
from tempest.openstack.common import log as logging
from tempest.test import attr
LOG = logging.getLogger(__name__)
class VolumesSnapshotTest(base.BaseVolumeTest):
_interface = "json"
@classmethod
def setUpClass(cls):
super(VolumesSnapshotTest, cls).setUpClass()
try:
cls.volume_origin = cls.create_volume()
except Exception:
LOG.exception("setup failed")
cls.tearDownClass()
raise
@classmethod
def tearDownClass(cls):
super(VolumesSnapshotTest, cls).tearDownClass()
@attr(type='gate')
def test_snapshot_create_get_list_update_delete(self):
# Create a snapshot
s_name = data_utils.rand_name('snap')
snapshot = self.create_snapshot(self.volume_origin['id'],
display_name=s_name)
# Get the snap and check for some of its details
resp, snap_get = self.snapshots_client.get_snapshot(snapshot['id'])
self.assertEqual(200, resp.status)
self.assertEqual(self.volume_origin['id'],
snap_get['volume_id'],
"Referred volume origin mismatch")
# Compare also with the output from the list action
tracking_data = (snapshot['id'], snapshot['display_name'])
resp, snaps_list = self.snapshots_client.list_snapshots()
self.assertEqual(200, resp.status)
snaps_data = [(f['id'], f['display_name']) for f in snaps_list]
self.assertIn(tracking_data, snaps_data)
# Updates snapshot with new values
new_s_name = data_utils.rand_name('new-snap')
new_desc = 'This is the new description of snapshot.'
resp, update_snapshot = \
self.snapshots_client.update_snapshot(snapshot['id'],
display_name=new_s_name,
display_description=new_desc)
# Assert response body for update_snapshot method
self.assertEqual(200, resp.status)
self.assertEqual(new_s_name, update_snapshot['display_name'])
self.assertEqual(new_desc, update_snapshot['display_description'])
# Assert response body for get_snapshot method
resp, updated_snapshot = \
self.snapshots_client.get_snapshot(snapshot['id'])
self.assertEqual(200, resp.status)
self.assertEqual(new_s_name, updated_snapshot['display_name'])
self.assertEqual(new_desc, updated_snapshot['display_description'])
# Delete the snapshot
self.snapshots_client.delete_snapshot(snapshot['id'])
self.assertEqual(200, resp.status)
self.snapshots_client.wait_for_resource_deletion(snapshot['id'])
self.snapshots.remove(snapshot)
@attr(type='gate')
def test_volume_from_snapshot(self):
# Create a temporary snap using wrapper method from base, then
# create a snap based volume, check resp code and deletes it
snapshot = self.create_snapshot(self.volume_origin['id'])
# NOTE(gfidente): size is required also when passing snapshot_id
resp, volume = self.volumes_client.create_volume(
size=1,
snapshot_id=snapshot['id'])
self.assertEqual(200, resp.status)
self.volumes_client.wait_for_volume_status(volume['id'], 'available')
self.volumes_client.delete_volume(volume['id'])
self.volumes_client.wait_for_resource_deletion(volume['id'])
self.clear_snapshots()
@attr(type='gate')
def test_larger_volume_from_snapshot(self):
# Create a temporary snap using wrapper method from base, then
# create a snap based volume where volume size > snap size,
# check resp code and size and deletes it
snapshot = self.create_snapshot(self.volume_origin['id'])
vol_size = int(self.volume_origin['size']) + 1
resp, volume = self.volumes_client.create_volume(
size=vol_size,
snapshot_id=snapshot['id'])
self.assertEqual(200, resp.status)
self.assertEqual(vol_size, int(volume['size']))
self.volumes_client.wait_for_volume_status(volume['id'], 'available')
self.volumes_client.delete_volume(volume['id'])
self.volumes_client.wait_for_resource_deletion(volume['id'])
self.clear_snapshots()
class VolumesSnapshotTestXML(VolumesSnapshotTest):
_interface = "xml"
| {
"content_hash": "d5a65ef48d3719e0d5a488c4c53e992e",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 79,
"avg_line_length": 43.21904761904762,
"alnum_prop": 0.6313353900396651,
"repo_name": "adkerr/tempest",
"id": "895e69007d4f42058a4ea3c9515c3d4c34fb4d7e",
"size": "5156",
"binary": false,
"copies": "1",
"ref": "refs/heads/netapp/akerr",
"path": "tempest/api/volume/test_volumes_snapshots.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1855736"
},
{
"name": "Shell",
"bytes": "5748"
}
],
"symlink_target": ""
} |
from datetime import date, datetime
import re
from juriscraper.OpinionSite import OpinionSite
from lxml import html
class Site(OpinionSite):
def __init__(self):
super(Site, self).__init__()
self.court_id = self.__module__
self.year = date.today().year
self.regex = re.compile("(S?C\d+-\d+)(.*)")
self.base_path = "//h2[contains(., '{y}')]".format(y=self.year)
self.back_scrape_iterable = range(1999, 2013)
self.url = 'http://www.floridasupremecourt.org/decisions/opinions.shtml'
def _get_case_names(self):
path = '{base}/text()/following::ul[1]//li' \
'//a[not(contains(., "Notice"))][not(contains(., "Rehearing Order"))]'.format(
base=self.base_path)
case_names = []
for e in self.html.xpath(path):
s = ' '.join(e.xpath('.//text()'))
try:
case_name = self.regex.search(s).group(2)
if not case_name.strip():
continue
else:
case_names.append(case_name)
except AttributeError:
pass
return case_names
def _get_download_urls(self):
path = '{base}/text()/following::ul[1]//li' \
'//a[not(contains(., "Notice"))][not(contains(., "Rehearing Order"))]'.format(
base=self.base_path)
urls = []
for e in self.html.xpath(path):
try:
case_name_check = self.regex.search(html.tostring(e, method='text', encoding='unicode')).group(2)
if not case_name_check.strip():
continue
else:
urls.append(e.xpath('@href')[0])
except AttributeError:
pass
return urls
def _get_case_dates(self):
case_dates = []
for e in self.html.xpath(self.base_path):
text = e.xpath("./text()")[0]
text = re.sub('Releases for ', '', text)
case_date = datetime.strptime(text.strip(), '%B %d, %Y').date()
count = 0
for a in e.xpath('./following::ul[1]//li//a[not(contains(., "Notice"))][not(contains(., "Rehearing Order"))]'):
try:
case_name_check = self.regex.search(html.tostring(a, method='text', encoding='unicode')).group(2)
if not case_name_check.strip():
continue
else:
count += 1
except AttributeError:
pass
case_dates.extend([case_date] * count)
return case_dates
def _get_precedential_statuses(self):
return ['Published'] * len(self.case_names)
def _get_docket_numbers(self):
path = '{base}/text()/following::ul[1]//li' \
'//a[not(contains(., "Notice"))][not(contains(., "Rehearing Order"))]'.format(base=self.base_path)
docket_numbers = []
for a in self.html.xpath(path):
try:
case_name_check = self.regex.search(html.tostring(a, method='text', encoding='unicode')).group(2)
if not case_name_check.strip():
continue
else:
docket_numbers.append(self.regex.search(html.tostring(a, method='text', encoding='unicode')).group(1))
except AttributeError:
pass
return docket_numbers
| {
"content_hash": "135300bfcf2ff74d36e89ad41c06d8ce",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 123,
"avg_line_length": 40.701149425287355,
"alnum_prop": 0.4990115786500988,
"repo_name": "brianwc/juriscraper",
"id": "6082f3c40abd548d2fd2e55d5a8e7991aee75aa8",
"size": "3692",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opinions/united_states/state/fla.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "24111143"
},
{
"name": "Python",
"bytes": "661024"
}
],
"symlink_target": ""
} |
import os
import uuid
import somaticseq.utilities.split_Bed_into_equal_regions as split_bed
from somaticseq._version import __version__ as VERSION
from pathlib import Path
DOCKER_IMAGES = { 'somaticseq_image' : 'lethalfang/somaticseq:{}'.format(VERSION),
'scalpel_image' : 'lethalfang/scalpel:0.5.4',
'mutect2_image' : 'broadinstitute/gatk:4.0.5.2',
'muse_image' : 'marghoob/muse:1.0rc_c',
'lofreq_image' : 'lethalfang/lofreq:2.1.3.1-1',
'jsm2_image' : 'lethalfang/jointsnvmix2:0.7.5',
'vardict_image' : 'lethalfang/vardictjava:1.7.0',
'somaticsniper_image' : 'lethalfang/somaticsniper:1.0.5.0-2',
'strelka2_image' : 'lethalfang/strelka:2.9.5',
'bwa_image' : 'lethalfang/bwa:0.7.17_samtools',
'picard_image' : 'lethalfang/picard:2.22.7',
'sambamba_image' : 'lethalfang/sambamba:0.7.1',
'samtools_image' : 'lethalfang/samtools:1.10',
'tabix_image' : 'lethalfang/tabix:1.10',
}
def container_params( container_image, tech='docker', files=[], extra_args='', singularity_image_loc='docker://' ):
file_Paths = [ Path(i) for i in files ]
file_names = [ i.name for i in file_Paths ]
file_dirs = [ i.parent for i in file_Paths ]
file_abs_dirs = [ i.absolute().parent for i in file_Paths ]
random_dirs = [ '/'+uuid.uuid4().hex for i in files ]
fileDict = {}
for file_i, path_i, filename_i, dir_i, abs_dir_i, random_dir_i in zip(files, file_Paths, file_names, file_dirs, file_abs_dirs, random_dirs):
fileDict[ file_i ] = {'filepath': path_i, 'filename': filename_i, 'dir': dir_i, 'abs_dir': abs_dir_i, 'mount_dir': random_dir_i, 'mount_path': os.path.join(random_dir_i, filename_i) }
if tech == 'docker':
MOUNT_STRING = ''
for file_i in fileDict:
sys_dir = fileDict[ file_i ][ 'abs_dir' ]
container_dir = fileDict[ file_i ][ 'mount_dir' ]
MOUNT_STRING = MOUNT_STRING + f' -v {sys_dir}:{container_dir}'
container_string = f'docker run {MOUNT_STRING} -u $(id -u):$(id -g) --rm {extra_args} {container_image}'
elif tech == 'singularity':
MOUNT_STRING = ''
for file_i in fileDict:
sys_dir = fileDict[ file_i ][ 'abs_dir' ]
container_dir = fileDict[ file_i ][ 'mount_dir' ]
MOUNT_STRING = MOUNT_STRING + f' --bind {sys_dir}:{container_dir}'
container_string = f'singularity exec --cleanenv {MOUNT_STRING} {extra_args} {singularity_image_loc}{container_image}'
return container_string, fileDict
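# --- Usage sketch (added for illustration; the file paths and samtools call are invented) ---
# The caller appends the actual tool invocation to the returned command prefix and refers
# to input files through their in-container mount paths from the returned fileDict:
if __name__ == '__main__':
    cmd_prefix, file_dict = container_params(
        DOCKER_IMAGES['samtools_image'], tech='docker',
        files=['/data/tumor.bam', '/data/genome.fa'])
    bam_in_container = file_dict['/data/tumor.bam']['mount_path']
    print(cmd_prefix + ' samtools view -H ' + bam_in_container)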
| {
"content_hash": "a8dd2c915d4bf233f56f7d9d10ad8de7",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 191,
"avg_line_length": 44.57142857142857,
"alnum_prop": 0.5275641025641026,
"repo_name": "bioinform/somaticseq",
"id": "8cfb3f1be78d5da7bf2dfba4be0bb73720dbccd9",
"size": "3120",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "somaticseq/utilities/dockered_pipelines/container_option.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "10062"
},
{
"name": "Perl",
"bytes": "2078"
},
{
"name": "Python",
"bytes": "655384"
},
{
"name": "R",
"bytes": "13253"
},
{
"name": "Shell",
"bytes": "226113"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, url
from app import views
urlpatterns = patterns(
'',
url(r'^$', views.app, name='app'),
)
| {
"content_hash": "b7c768a1ed78177a94744d1537dc5628",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 42,
"avg_line_length": 19.857142857142858,
"alnum_prop": 0.6546762589928058,
"repo_name": "1vasari/Guitar-Practice-App",
"id": "187d9b6f442a9e6333fa0fbfd612ebd5990edd5e",
"size": "140",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gpa/app/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "112"
},
{
"name": "HTML",
"bytes": "2666"
},
{
"name": "JavaScript",
"bytes": "261645"
},
{
"name": "Python",
"bytes": "20565"
}
],
"symlink_target": ""
} |
from flask_marshmallow.fields import URLFor
from marshmallow.fields import Boolean, Method
from mass_flask_api.config import api_blueprint
from mass_flask_api.schemas.base import BaseSchema, ForeignReferenceField
from mass_flask_core.models import AnalysisSystemInstance, AnalysisSystem, ScheduledAnalysis
class AnalysisSystemInstanceSchema(BaseSchema):
url = URLFor('.analysis_system_instance_detail', uuid='<uuid>', _external=True)
analysis_system = ForeignReferenceField(endpoint='.analysis_system_detail', queryset=AnalysisSystem.objects(), query_parameter='identifier_name')
is_online = Boolean()
scheduled_analyses_count = Method("get_scheduled_analyses_count")
class Meta(BaseSchema.Meta):
model = AnalysisSystemInstance
dump_only = [
'id',
'_cls',
'last_seen',
'scheduled_analyses_count'
]
def get_scheduled_analyses_count(self, obj):
analyses_count = ScheduledAnalysis.objects(analysis_system_instance=obj).count()
return analyses_count
api_blueprint.apispec.definition('AnalysisSystemInstance', schema=AnalysisSystemInstanceSchema)
| {
"content_hash": "e50f17bc4299640d695ceeb89012d6f8",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 149,
"avg_line_length": 41.42857142857143,
"alnum_prop": 0.7336206896551725,
"repo_name": "mass-project/mass_server",
"id": "cba19b34d91734a9047efb37933a4c9238d065bf",
"size": "1160",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mass_flask_api/schemas/analysis_system_instance.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "249448"
},
{
"name": "HTML",
"bytes": "925079"
},
{
"name": "JavaScript",
"bytes": "467344"
},
{
"name": "PHP",
"bytes": "3338"
},
{
"name": "Python",
"bytes": "174910"
},
{
"name": "Shell",
"bytes": "2717"
}
],
"symlink_target": ""
} |
'''
check the global_config category is image
@author YeTian 2018-09-20
'''
import zstackwoodpecker.test_util as test_util
#import test_stub
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.operations.config_operations as conf_ops
def test():
global deft_image_1
global deft_image_2
global deft_image_3
global deft_image_4
global deft_image_5
#get the default value
deft_image_1 = conf_ops.get_global_config_default_value('image', 'deletionPolicy')
deft_image_2 = conf_ops.get_global_config_default_value('image', 'deletion.gcInterval')
deft_image_3 = conf_ops.get_global_config_default_value('image', 'expungeInterval')
deft_image_4 = conf_ops.get_global_config_default_value('image', 'expungePeriod')
deft_image_5 = conf_ops.get_global_config_default_value('image', 'enableResetPassword')
# change the default value
conf_ops.change_global_config('image', 'deletionPolicy', 'Direct')
conf_ops.change_global_config('image', 'deletion.gcInterval', '1800')
conf_ops.change_global_config('image', 'expungeInterval', '1800')
conf_ops.change_global_config('image', 'expungePeriod', '3600')
conf_ops.change_global_config('image', 'enableResetPassword', 'false')
# restore defaults
conf_ops.change_global_config('image', 'deletionPolicy', '%s' % deft_image_1)
conf_ops.change_global_config('image', 'deletion.gcInterval', '%s' % deft_image_2)
conf_ops.change_global_config('image', 'expungeInterval', '%s' % deft_image_3)
conf_ops.change_global_config('image', 'expungePeriod', '%s' % deft_image_4)
conf_ops.change_global_config('image', 'enableResetPassword', '%s' % deft_image_5)
#Will be called only if exception happens in test().
def error_cleanup():
global deft_image_1
| {
"content_hash": "71a6a7a4ee44c8705a1fc44fc24a2186",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 91,
"avg_line_length": 38.234042553191486,
"alnum_prop": 0.7100723427935448,
"repo_name": "zstackio/zstack-woodpecker",
"id": "7dcc7ba82ff4085009a55fb5dedfdb260e86659a",
"size": "1797",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "integrationtest/vm/simulator/config/test_config_category_image.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2356"
},
{
"name": "Go",
"bytes": "49822"
},
{
"name": "Makefile",
"bytes": "687"
},
{
"name": "Puppet",
"bytes": "875"
},
{
"name": "Python",
"bytes": "13070596"
},
{
"name": "Shell",
"bytes": "177861"
}
],
"symlink_target": ""
} |
from nova import test
class GraniteTestHostOps(test.TestCase):
def setUp(self):
        super(GraniteTestHostOps, self).setUp()
| {
"content_hash": "72faf6c87e22b9833e17c0d76d37ff1a",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 47,
"avg_line_length": 26.6,
"alnum_prop": 0.7293233082706767,
"repo_name": "smoser/granite",
"id": "c2e8240f00bbff00e42bdb1a81722b27d3e21a1a",
"size": "133",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "granite/tests/test_hostops.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "47512"
},
{
"name": "Shell",
"bytes": "3934"
}
],
"symlink_target": ""
} |
"""
This file is part of Python Twofish
a Python bridge to the C Twofish library by Niels Ferguson
Released under The BSD 3-Clause License
Copyright (c) 2013 Keybase
setup.py - build and package info
"""
from distutils.core import setup, Extension
twofish_module = Extension('_twofish',
sources=['twofish-0.3/twofish.c', 'twofish.c'],
include_dirs=['twofish-0.3'])
setup(name='twofish',
version='0.3.0',
description='Bindings for the Twofish implementation by Niels Ferguson',
author='Filippo Valsorda',
author_email='filippo.valsorda@gmail.com',
url='http://github.com/keybase/python-twofish',
py_modules=['twofish'],
ext_modules=[twofish_module],
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Security :: Cryptography',
'Topic :: Software Development :: Libraries'],
license='3-clause BSD',
long_description=open('README.rst').read())
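# Standard distutils workflow, shown for reference (not part of the original file):
#   python setup.py build
#   python setup.py install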
| {
"content_hash": "a91e85686cb4d50c20af0097e958b27a",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 78,
"avg_line_length": 38.1764705882353,
"alnum_prop": 0.5909090909090909,
"repo_name": "keybase/python-twofish",
"id": "fcc95465ad9172cf1e7cc3e5201b815a65311cea",
"size": "1298",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "64232"
},
{
"name": "Objective-C",
"bytes": "6356"
},
{
"name": "Python",
"bytes": "5011"
}
],
"symlink_target": ""
} |
"""Device drivers for the weewx weather system."""
import syslog
import weewx
class AbstractDevice(object):
"""Device drivers should inherit from this class."""
@property
def hardware_name(self):
raise NotImplementedError("Property 'hardware_name' not implemented")
@property
def archive_interval(self):
raise NotImplementedError("Property 'archive_interval' not implemented")
def genStartupRecords(self, last_ts):
return self.genArchiveRecords(last_ts)
def genLoopPackets(self):
raise NotImplementedError("Method 'genLoopPackets' not implemented")
def genArchiveRecords(self, lastgood_ts):
raise NotImplementedError("Method 'genArchiveRecords' not implemented")
def getTime(self):
raise NotImplementedError("Method 'getTime' not implemented")
def setTime(self):
raise NotImplementedError("Method 'setTime' not implemented")
def closePort(self):
pass
class AbstractConfigurator(object):
"""The configurator class defines an interface for configuring devices.
Inherit from this class to provide a comman-line interface for setting
up a device, querying device status, and other setup/maintenance
operations."""
@property
def description(self):
return "Configuration utility for weewx devices."
@property
def usage(self):
return "%prog [config_file] [options] [--debug] [--help]"
@property
def epilog(self):
return "Be sure to stop weewx first before using. Mutating actions will"\
" request confirmation before proceeding.\n"
def configure(self, config_dict):
parser = self.get_parser()
self.add_options(parser)
options, _ = parser.parse_args()
if options.debug is not None:
weewx.debug = options.debug
syslog.setlogmask(syslog.LOG_UPTO(syslog.LOG_DEBUG))
prompt = False if options.noprompt else True
self.do_options(options, parser, config_dict, prompt)
def get_parser(self):
import optparse
return optparse.OptionParser(description=self.description,
usage=self.usage, epilog=self.epilog)
def add_options(self, parser):
"""Add command line options. Derived classes should override this
method to add more options."""
parser.add_option("--debug", dest="debug",
action="store_true",
help="display diagnostic information while running")
parser.add_option("-y", dest="noprompt",
action="store_true",
help="answer yes to every prompt")
def do_options(self, options, parser, config_dict, prompt):
"""Derived classes must implement this to actually do something."""
raise NotImplementedError("Method 'do_options' not implemented")
class AbstractConfEditor(object):
"""The conf editor class provides methods for producing and updating
configuration stanzas for use in configuration file.
"""
@property
def default_stanza(self):
"""Return a plain text stanza. This will look something like:
[Acme]
# This section is for the Acme weather station
# The station model
model = acme100
# Serial port such as /dev/ttyS0, /dev/ttyUSB0, or /dev/cuaU0
port = /dev/ttyUSB0
# The driver to use:
driver = weewx.drivers.acme
"""
raise NotImplementedError("property 'default_stanza' is not defined")
def get_conf(self, orig_stanza=None):
"""Given a configuration stanza, return a possibly modified copy
that will work with the current version of the device driver.
The default behavior is to return the original stanza, unmodified.
Derived classes should override this if they need to modify previous
configuration options or warn about deprecated or harmful options.
The return value should be a long string. See default_stanza above
for an example string stanza."""
return self.default_stanza if orig_stanza is None else orig_stanza
def prompt_for_settings(self):
"""Prompt for settings required for proper operation of this driver.
"""
return dict()
def _prompt(self, label, dflt=None, opts=None):
import weecfg
val = weecfg.prompt_with_options(label, dflt, opts)
del weecfg
return val
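# --- Minimal driver sketch (added for illustration; the station name and packet values
# are invented, and a real driver would live in its own module under weewx.drivers) ---
class _ExampleDevice(AbstractDevice):
    """Toy driver that emits one fixed LOOP packet every few seconds."""
    @property
    def hardware_name(self):
        return "Example station"
    def genLoopPackets(self):
        import time
        while True:
            # A LOOP packet is a plain dict keyed by observation names.
            yield {'dateTime': int(time.time() + 0.5),
                   'usUnits': weewx.US,
                   'outTemp': 68.0}
            time.sleep(2.5)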
| {
"content_hash": "c4af06e9a0652f341b4275b55cc0f079",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 81,
"avg_line_length": 34.50757575757576,
"alnum_prop": 0.6489571899012074,
"repo_name": "tony-rasskazov/meteo",
"id": "7e134647dfff3b68b210f1a278cdf57b644783ec",
"size": "4673",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "weewx/bin/weewx/drivers/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "12966"
},
{
"name": "HTML",
"bytes": "5186"
},
{
"name": "JavaScript",
"bytes": "9056"
},
{
"name": "Python",
"bytes": "3806"
}
],
"symlink_target": ""
} |
import logging
import unittest
"""EquiLeader (https://codility.com/demo/take-sample-test/equi_leader/)
Analysis:
- Find leader in O(n) and count_leader (https://codility.com/media/train/6-Leader.pdf)
- identify count_leader_left is valid and count_leader-count_leader_left is valid
"""
__author__ = 'au9ustine'
logging.basicConfig(format='%(message)s', level=logging.DEBUG)
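# Worked example (annotation added for clarity): for A = [4, 3, 4, 4, 4, 2] the leader is 4
# (four occurrences out of six). Splitting after index 0 gives [4] / [3, 4, 4, 4, 2], and
# splitting after index 2 gives [4, 3, 4] / [4, 4, 2]; the value 4 leads both halves in each
# case, so solution(A) == 2, matching the unit test below.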
def solution(A):
leader_val = -1
count_of_most = 0
for i in xrange(len(A)):
if count_of_most == 0:
leader_val = A[i]
count_of_most += 1
else:
if A[i] != leader_val:
count_of_most -= 1
else:
count_of_most += 1
count_of_leader = len([val for val in A if val == leader_val])
# If leader does not exist, return 0
if count_of_leader <= len(A) // 2:
return 0
# If leader exists, try seeking EquiLeader
count_of_equi_leader = 0
count_of_leader_left = 0
for i in xrange(len(A)):
if A[i] == leader_val:
count_of_leader_left += 1
count_of_leader_right = count_of_leader - count_of_leader_left
if count_of_leader_left > (i+1) // 2 and count_of_leader_right > (len(A)-i-1) // 2:
count_of_equi_leader += 1
return count_of_equi_leader
class SolutionTest(unittest.TestCase):
def setUp(self):
self.data = [
([4, 3, 4, 4, 4, 2], 2)
]
def test_solution(self):
for input_data, expected in self.data:
actual = solution(input_data)
self.assertEquals(expected, actual)
if __name__ == "__main__":
unittest.main(failfast=True) | {
"content_hash": "aa9fd6455248b44942c2d4723d9e7242",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 91,
"avg_line_length": 29.17543859649123,
"alnum_prop": 0.5790739627179795,
"repo_name": "au9ustine/org.au9ustine.puzzles.codility",
"id": "d14040760315633d54119ea88e27a4ff7c7e3bf8",
"size": "1663",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lessons/lesson06_leader/EquiLeader.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "50619"
}
],
"symlink_target": ""
} |
from datetime import datetime
import math
from flask import (
Flask,
abort,
flash,
redirect,
render_template,
request,
url_for,
)
from flask.ext.stormpath import (
StormpathError,
StormpathManager,
User,
login_required,
login_user,
logout_user,
user,
)
app = Flask(__name__)
app.config['DEBUG'] = True
app.config['SECRET_KEY'] = 'some_really_long_random_string_here'
app.config['STORMPATH_API_KEY_FILE'] = 'apiKey.properties'
app.config['STORMPATH_APPLICATION'] = 'InternWeekenders'
stormpath_manager = StormpathManager(app)
@app.route('/')
def main():
return redirect(url_for('home'))
#@app.route('/add', methods=['POST'])
#@login_required
#def add_post():
# if not user.custom_data.get('posts'):
# user.custom_data['posts'] = []
# user.custom_data['posts'].append({
# 'date': datetime.utcnow().isoformat(),
# 'title': request.form['title'],
# 'text': request.form['text'],
# })
#user.save()
# flash('New post successfully added.')
# return redirect(url_for('show_posts'))
@app.route('/contact')
def contact():
return render_template('contact.html')
@app.route('/post')
@login_required
def post():
return render_template('post.html')
@app.route('/read_more<string:post_name>')
@login_required
def read_more(post_name):
posts = []
comments = []
post_by_name = 0
comments_by_id = []
for account in stormpath_manager.application.accounts:
if account.custom_data.get('posts'):
posts.extend(account.custom_data['posts'])
for post in posts:
if post['title'] == post_name:
post_by_name = post
for account in stormpath_manager.application.accounts:
if account.custom_data.get('comments'):
comments.extend(account.custom_data['comments'])
print comments
for comment in comments:
print comment['comment_id']
print post_by_name['comment_id']
if comment['comment_id'] == post_by_name['comment_id']:
print comment
comments_by_id.append(comment)
print comments_by_id
return render_template('read_more.html', post_name = post_name, post = post_by_name, comments = comments_by_id)
# @login_required
@app.route('/edit<string:title>')
def edit(title):
posts = []
for account in stormpath_manager.application.accounts:
if account.custom_data.get('posts'):
posts.extend(account.custom_data['posts'])
posts = sorted(posts, key=lambda k: k['date'], reverse=True)
for post in posts:
if post['title'] == title:
new_post = post
return render_template('edit.html',post=new_post)
@app.route('/submit', methods=['POST'])
def submit():
if not user.custom_data.get('posts'):
user.custom_data['posts'] = []
user.custom_data['posts'].append({
'date': datetime.utcnow().isoformat(),
'title': request.form['title'],
'location': request.form['location'],
'crowd': request.form['crowd'],
'activity': request.form['activity'],
'expense': request.form['expense'],
'blog': request.form['blog'],
'user_email': str(user),
'comment_id': "1234random",
'vote': 0
})
user.save()
print(user.custom_data['posts'])
return redirect(url_for('sites', page=1,user=str(user)))
@app.route('/update<string:title>', methods=['POST'])
def update(title):
if not user.custom_data.get('posts'):
user.custom_data['posts'] = []
posts = []
for account in stormpath_manager.application.accounts:
if account.custom_data.get('posts'):
posts.extend(account.custom_data['posts'])
posts = sorted(posts, key=lambda k: k['date'], reverse=True)
for post in user.custom_data['posts']:
if post['title'] == title and str(post['user_email']) == str(user):
user.custom_data['posts'].remove(post)
print(user.custom_data['posts'])
user.custom_data['posts'].append({
'date': datetime.utcnow().isoformat(),
'title': request.form['title'],
'location': request.form['location'],
'crowd': request.form['crowd'],
'activity': request.form['activity'],
'expense': request.form['expense'],
'blog': request.form['blog'],
'user_email': str(user),
'vote': 0
})
#print(user.custom_data['posts'])
user.save()
return redirect(url_for('sites', page=1))
@app.route('/comment<string:comment_id>', methods=['POST'])
@login_required
def comment(comment_id):
if not user.custom_data.get('comments'):
user.custom_data['comments'] = []
comment = {
'date': datetime.utcnow().isoformat(),
'title': request.form['title'],
'comment_text': request.form['comment'],
'comment_id': comment_id
}
user.custom_data['comments'].append(comment)
user.save()
'''
posts = []
post_by_name = 0
for account in stormpath_manager.application.accounts:
if account.custom_data.get('posts'):
posts.extend(account.custom_data['posts'])
for post in posts:
if post['comment_id'] == comment_id:
post_by_id = post
'''
return redirect(url_for('sites', page = 1))
@app.route('/votes<string:title>', methods = ['POST'])
def votes(title):
if not user.custom_data.get('posts'):
user.custom_data['posts'] = []
posts = []
for account in stormpath_manager.application.accounts:
if account.custom_data.get('posts'):
posts.extend(account.custom_data['posts'])
posts = sorted(posts, key=lambda k: k['date'], reverse=True)
for post in user.custom_data['posts']:
i = 0
print(post['title'])
print(title)
if post['title'] == title:
print("inside")
post['vote'] += 1
new_post = post
print(new_post)
return redirect(url_for('sites', page=1, new_post=new_post))
# return render_template(url_for('post.html'))
@app.route('/delete<string:title>')
def delete(title):
if not user.custom_data.get('posts'):
user.custom_data['posts'] = []
posts = []
for account in stormpath_manager.application.accounts:
if account.custom_data.get('posts'):
posts.extend(account.custom_data['posts'])
posts = sorted(posts, key=lambda k: k['date'], reverse=True)
for post in user.custom_data['posts']:
if post['title'] == title and str(post['user_email']) == str(user):
user.custom_data['posts'].remove(post)
user.save()
return render_template('delete.html', title=title)
# if not user.custom_data.get('posts'):
# user.custom_data['posts'] = []
#
# posts = []
# for account in stormpath_manager.application.accounts:
# if account.custom_data.get('posts'):
# posts.extend(account.custom_data['posts'])
# posts = sorted(posts, key=lambda k: k['date'], reverse=True)
#
# print(user.custom_data['posts'])
# del user.custom_data['posts'][:]
#
# for post in user.custom_data['posts']:
# i = 0
# if post['title'] == title and post['user_email'] == user:
# print(user.custom_data['posts'])
# del user.custom_data['posts'][i]
#
# print(user.custom_data['posts'])
# user.save()
#
# return redirect(url_for('sites', page=1))
@app.route('/sites<int:page>')
@login_required
def sites(page):
posts = []
for account in stormpath_manager.application.accounts:
if account.custom_data.get('posts'):
posts.extend(account.custom_data['posts'])
posts = sorted(posts, key=lambda k: k['date'], reverse=True)
#calculate the max amount of pages for the amount of posts
total_pgs=math.ceil(len(posts)/3.0)
#ensures that only 3 posts are shown per page
posts = posts[((page-1)*3):(((page-1)*3)+3)]
return render_template('sites.html', posts=posts, page=page, max=total_pgs, user1=str(user))
@app.route('/home')
def home():
posts = []
for account in stormpath_manager.application.accounts:
if account.custom_data.get('posts'):
posts.extend(account.custom_data['posts'])
posts = sorted(posts, key=lambda k: k['date'], reverse=True)
#ensures that only 3 posts are shown on the homepage
if len(posts) > 3:
posts = posts[:3]
return render_template('index.html', posts = posts )
@app.route('/login', methods=['GET', 'POST'])
def login():
error = None
if request.method == 'POST':
try:
_user = User.from_login(
request.form['email'],
request.form['password'],
)
login_user(_user, remember=True)
flash('You were logged in.')
return redirect(url_for('home'))
except StormpathError, err:
error = err.message
return render_template('login.html', error=error)
@app.route('/logout')
def signout():
logout_user()
flash('You were logged out.')
return redirect(url_for('home'))
if __name__ == '__main__':
app.run()
| {
"content_hash": "42af234096e25c08b70961e4a3312bb1",
"timestamp": "",
"source": "github",
"line_count": 315,
"max_line_length": 112,
"avg_line_length": 28.650793650793652,
"alnum_prop": 0.6086426592797783,
"repo_name": "omnipharious/InternWeekenders",
"id": "a4e7be83d26e2602222af9ec0d72e6a9c5325132",
"size": "9025",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3418"
},
{
"name": "HTML",
"bytes": "47193"
},
{
"name": "Python",
"bytes": "9025"
}
],
"symlink_target": ""
} |
from flask import render_template, abort, redirect, flash, url_for, request, current_app, make_response
from flask.ext.login import login_required, current_user
from . import main
from .forms import EditProfileForm, EditProfileAdminForm, PostForm, CommentForm
from .. import db
from ..models import Role, User, Post, Permission, Comment
from ..decorators import admin_required, permission_required
@main.route('/', methods=['GET','POST'])
def index():
form = PostForm()
if current_user.can(Permission.WRITE_ARTICLES) and \
form.validate_on_submit():
post = Post(body=form.body.data, author=current_user._get_current_object())
        db.session.add(post)
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
show_followed = False
if current_user.is_authenticated:
show_followed = bool(request.cookies.get('show_followed', ''))
if show_followed:
query = current_user.followed_posts
else:
query = Post.query
pagination = query.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'],
error_out=False)
posts = pagination.items
return render_template('index.html', form=form, posts=posts,
show_followed=show_followed, pagination=pagination)
@main.route('/user/<username>')
def user(username):
user = User.query.filter_by(username=username).first_or_404()
page = request.args.get('page', 1, type=int)
pagination = user.posts.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'], error_out=False)
posts = pagination.items
return render_template('user.html', user=user, posts = posts, pagination=pagination)
@main.route('/edit-profile', methods=['POST', 'GET'])
@login_required
def edit_profile():
form = EditProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.about_me = form.about_me.data
db.session.add(current_user)
flash('Your profile has been updated.')
        return redirect(url_for('.user', username=current_user.username))
form.name.data = current_user.name
form.location.data = current_user.location
form.about_me.data = current_user.about_me
return render_template('edit_profile.html', form=form)
@main.route('/edit-profile/<int:id>', methods=['POST', 'GET'])
@login_required
@admin_required
def edit_profile_admin(id):
user = User.query.get_or_404(id)
form = EditProfileAdminForm(user=user)
if form.validate_on_submit():
user.email = form.email.data
user.username = form.username.data
user.confirmed = form.confirmed.data
user.role = Role.query.get(form.role.data)
user.name = form.name.data
user.location = form.location.data
user.about_me = form.about_me.data
db.session.add(user)
        flash('The profile has been updated.')
return redirect(url_for('.user', username=user.username))
form.email.data = user.email
form.username.data = user.username
form.confirmed.data = user.confirmed
form.role.data = user.role_id
form.name.data = user.name
form.location.data = user.location
form.about_me.data = user.about_me
return render_template('edit_profile.html', form=form, user=user)
@main.route('/post/<int:id>', methods=['POST', 'GET'])
def post(id):
post = Post.query.get_or_404(id)
form = CommentForm()
if form.validate_on_submit():
comment = Comment(body=form.body.data,
post=post,
author=current_user._get_current_object())
db.session.add(comment)
        flash('Your comment has been published.')
return redirect(url_for('.post', id=post.id, page=-1))
page = request.args.get('page', 1, type=int)
if page == -1:
page = (post.comments.count() - 1) / \
current_app.config['FLASKY_COMMENTS_PER_PAGE'] + 1
pagination = post.comments.order_by(Comment.timestamp.asc()).paginate(
page, per_page=current_app.config['FLASKY_COMMENTS_PER_PAGE'],
error_out=False)
comments = pagination.items
return render_template('post.html', posts=[post], form=form,
comments=comments, pagination=pagination)
@main.route('/edit/<int:id>', methods=['GET', 'POST'])
@login_required
def edit_post(id):
post = Post.query.get_or_404(id)
if current_user != post.author and \
not current_user.can(Permission.ADMINISTER):
abort(403)
form = PostForm()
if form.validate_on_submit():
post.body = form.body.data
db.session.add(post)
flash('The post has been updated.')
        return redirect(url_for('.post', id=post.id))
form.body.data = post.body
return render_template('edit_post.html', form=form)
@main.route('/follow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def follow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
if current_user.is_following(user):
        flash('You are already following this user.')
return redirect(url_for('.user',username=username))
current_user.follow(user)
flash('You are now following this user.')
return redirect(url_for('.user', username=username))
@main.route('/unfollow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def unfollow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
        return redirect(url_for('.index'))
    if not current_user.is_following(user):
        flash('You are not following this user.')
        return redirect(url_for('.user', username=username))
    current_user.unfollow(user)
    flash('You are no longer following this user.')
return redirect(url_for('.user', username=username))
@main.route('/followers/<username>')
def followers(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
        return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
pagination = user.followers.paginate(
page, per_page=current_app.config['FLASKY_FOLLOWERS_PER_PAGE'],
error_out=False)
follows = [{'user': item.follower,'timestamp':item.timestamp}
for item in pagination.items]
return render_template('followers.html', user=user, title='Followers of',
endpoint='.followers', pagination=pagination,
follows=follows)
@main.route('/followed-by/<username>')
def followed_by(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
        return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
pagination = user.followed.paginate(
page, per_page=current_app.config['FLASKY_FOLLOWERS_PER_PAGE'])
follows = [{'user': item.followed, 'timestamp': item.timestamp}
for item in pagination.items]
return render_template('followers.html', user=user, title='Followed by',
endpoint='.followed_by', pagination=pagination,
follows=follows)
@main.route('/all')
@login_required
def show_all():
resp = make_response(redirect(url_for('.index')))
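    # an empty cookie value means "show all posts"; keep the choice for 30 days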
resp.set_cookie('show_followed', '', max_age=30*24*60*60)
return resp
@main.route('/followed')
@login_required
def show_followed():
resp = make_response(redirect(url_for('.index')))
resp.set_cookie('show_followed', '1', max_age=30*24*60*60)
return resp | {
"content_hash": "572cbb76bf0e0e542146fedc973f54f9",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 103,
"avg_line_length": 40.48717948717949,
"alnum_prop": 0.650538315389487,
"repo_name": "chestnutme/flaskie",
"id": "41b8c3607c95fa81233d360df5b4cc9e5ecbecf2",
"size": "7895",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/main/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1093"
},
{
"name": "HTML",
"bytes": "11754"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "43348"
}
],
"symlink_target": ""
} |
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.blocktools import *
COIN = 100000000
SEQUENCE_LOCKTIME_DISABLE_FLAG = (1<<31)
SEQUENCE_LOCKTIME_TYPE_FLAG = (1<<22) # this means use time (0 means height)
SEQUENCE_LOCKTIME_GRANULARITY = 9 # this is a bit-shift
SEQUENCE_LOCKTIME_MASK = 0x0000ffff
# RPC error for non-BIP68 final transactions
NOT_FINAL_ERROR = "64: non-BIP68-final"
class BIP68Test(BitcoinTestFramework):
def setup_network(self):
self.nodes = []
# MVF-Core begin disable HF activation on SegWit due to interference with this test
# actually it is the difficulty reset to non-POW-limit that is problematic for framework
# see https://github.com/BTCfork/hardfork_prototype_1_mvf-core/issues/9
self.nodes.append(start_node(0, self.options.tmpdir, ["-nosegwitfork", "-debug", "-blockprioritysize=0"]))
self.nodes.append(start_node(1, self.options.tmpdir, ["-nosegwitfork", "-debug", "-blockprioritysize=0", "-acceptnonstdtxn=0"]))
# MVF-Core end
self.is_network_split = False
self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
connect_nodes(self.nodes[0], 1)
def run_test(self):
# Generate some coins
self.nodes[0].generate(110)
print "Running test disable flag"
self.test_disable_flag()
print "Running test sequence-lock-confirmed-inputs"
self.test_sequence_lock_confirmed_inputs()
print "Running test sequence-lock-unconfirmed-inputs"
self.test_sequence_lock_unconfirmed_inputs()
print "Running test BIP68 not consensus before versionbits activation"
self.test_bip68_not_consensus()
print "Verifying nVersion=2 transactions aren't standard"
self.test_version2_relay(before_activation=True)
print "Activating BIP68 (and 112/113)"
self.activateCSV()
print "Verifying nVersion=2 transactions are now standard"
self.test_version2_relay(before_activation=False)
print "Passed\n"
# Test that BIP68 is not in effect if tx version is 1, or if
# the first sequence bit is set.
def test_disable_flag(self):
# Create some unconfirmed inputs
new_addr = self.nodes[0].getnewaddress()
self.nodes[0].sendtoaddress(new_addr, 2) # send 2 BTC
utxos = self.nodes[0].listunspent(0, 0)
assert(len(utxos) > 0)
utxo = utxos[0]
tx1 = CTransaction()
value = int(satoshi_round(utxo["amount"] - self.relayfee)*COIN)
# Check that the disable flag disables relative locktime.
# If sequence locks were used, this would require 1 block for the
# input to mature.
sequence_value = SEQUENCE_LOCKTIME_DISABLE_FLAG | 1
tx1.vin = [CTxIn(COutPoint(int(utxo["txid"], 16), utxo["vout"]), nSequence=sequence_value)]
tx1.vout = [CTxOut(value, CScript([b'a']))]
tx1_signed = self.nodes[0].signrawtransaction(ToHex(tx1))["hex"]
tx1_id = self.nodes[0].sendrawtransaction(tx1_signed)
tx1_id = int(tx1_id, 16)
# This transaction will enable sequence-locks, so this transaction should
# fail
tx2 = CTransaction()
tx2.nVersion = 2
sequence_value = sequence_value & 0x7fffffff
tx2.vin = [CTxIn(COutPoint(tx1_id, 0), nSequence=sequence_value)]
tx2.vout = [CTxOut(int(value-self.relayfee*COIN), CScript([b'a']))]
tx2.rehash()
try:
self.nodes[0].sendrawtransaction(ToHex(tx2))
except JSONRPCException as exp:
assert_equal(exp.error["message"], NOT_FINAL_ERROR)
else:
assert(False)
# Setting the version back down to 1 should disable the sequence lock,
# so this should be accepted.
tx2.nVersion = 1
self.nodes[0].sendrawtransaction(ToHex(tx2))
# Calculate the median time past of a prior block ("confirmations" before
# the current tip).
def get_median_time_past(self, confirmations):
block_hash = self.nodes[0].getblockhash(self.nodes[0].getblockcount()-confirmations)
return self.nodes[0].getblockheader(block_hash)["mediantime"]
# Test that sequence locks are respected for transactions spending confirmed inputs.
def test_sequence_lock_confirmed_inputs(self):
# Create lots of confirmed utxos, and use them to generate lots of random
# transactions.
max_outputs = 50
addresses = []
while len(addresses) < max_outputs:
addresses.append(self.nodes[0].getnewaddress())
while len(self.nodes[0].listunspent()) < 200:
import random
random.shuffle(addresses)
num_outputs = random.randint(1, max_outputs)
outputs = {}
for i in xrange(num_outputs):
outputs[addresses[i]] = random.randint(1, 20)*0.01
self.nodes[0].sendmany("", outputs)
self.nodes[0].generate(1)
utxos = self.nodes[0].listunspent()
# Try creating a lot of random transactions.
# Each time, choose a random number of inputs, and randomly set
# some of those inputs to be sequence locked (and randomly choose
# between height/time locking). Small random chance of making the locks
# all pass.
for i in xrange(400):
# Randomly choose up to 10 inputs
num_inputs = random.randint(1, 10)
random.shuffle(utxos)
# Track whether any sequence locks used should fail
should_pass = True
# Track whether this transaction was built with sequence locks
using_sequence_locks = False
tx = CTransaction()
tx.nVersion = 2
value = 0
for j in xrange(num_inputs):
sequence_value = 0xfffffffe # this disables sequence locks
# 50% chance we enable sequence locks
if random.randint(0,1):
using_sequence_locks = True
# 10% of the time, make the input sequence value pass
input_will_pass = (random.randint(1,10) == 1)
sequence_value = utxos[j]["confirmations"]
if not input_will_pass:
sequence_value += 1
should_pass = False
# Figure out what the median-time-past was for the confirmed input
# Note that if an input has N confirmations, we're going back N blocks
# from the tip so that we're looking up MTP of the block
# PRIOR to the one the input appears in, as per the BIP68 spec.
orig_time = self.get_median_time_past(utxos[j]["confirmations"])
cur_time = self.get_median_time_past(0) # MTP of the tip
# can only timelock this input if it's not too old -- otherwise use height
can_time_lock = True
if ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY) >= SEQUENCE_LOCKTIME_MASK:
can_time_lock = False
# if time-lockable, then 50% chance we make this a time lock
if random.randint(0,1) and can_time_lock:
# Find first time-lock value that fails, or latest one that succeeds
time_delta = sequence_value << SEQUENCE_LOCKTIME_GRANULARITY
if input_will_pass and time_delta > cur_time - orig_time:
sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)
elif (not input_will_pass and time_delta <= cur_time - orig_time):
sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)+1
sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
tx.vin.append(CTxIn(COutPoint(int(utxos[j]["txid"], 16), utxos[j]["vout"]), nSequence=sequence_value))
value += utxos[j]["amount"]*COIN
# Overestimate the size of the tx - signatures should be less than 120 bytes, and leave 50 for the output
tx_size = len(ToHex(tx))//2 + 120*num_inputs + 50
tx.vout.append(CTxOut(int(value-self.relayfee*tx_size*COIN/1000), CScript([b'a'])))
rawtx = self.nodes[0].signrawtransaction(ToHex(tx))["hex"]
try:
self.nodes[0].sendrawtransaction(rawtx)
except JSONRPCException as exp:
assert(not should_pass and using_sequence_locks)
assert_equal(exp.error["message"], NOT_FINAL_ERROR)
else:
assert(should_pass or not using_sequence_locks)
# Recalculate utxos if we successfully sent the transaction
utxos = self.nodes[0].listunspent()
# Test that sequence locks on unconfirmed inputs must have nSequence
# height or time of 0 to be accepted.
# Then test that BIP68-invalid transactions are removed from the mempool
# after a reorg.
def test_sequence_lock_unconfirmed_inputs(self):
# Store height so we can easily reset the chain at the end of the test
cur_height = self.nodes[0].getblockcount()
# Create a mempool tx.
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)
tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
tx1.rehash()
# Anyone-can-spend mempool tx.
# Sequence lock of 0 should pass.
tx2 = CTransaction()
tx2.nVersion = 2
tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
tx2_raw = self.nodes[0].signrawtransaction(ToHex(tx2))["hex"]
tx2 = FromHex(tx2, tx2_raw)
tx2.rehash()
self.nodes[0].sendrawtransaction(tx2_raw)
# Create a spend of the 0th output of orig_tx with a sequence lock
# of 1, and test what happens when submitting.
# orig_tx.vout[0] must be an anyone-can-spend output
def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock):
sequence_value = 1
if not use_height_lock:
sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
tx = CTransaction()
tx.nVersion = 2
tx.vin = [CTxIn(COutPoint(orig_tx.sha256, 0), nSequence=sequence_value)]
tx.vout = [CTxOut(int(orig_tx.vout[0].nValue - relayfee*COIN), CScript([b'a']))]
tx.rehash()
try:
node.sendrawtransaction(ToHex(tx))
except JSONRPCException as exp:
assert_equal(exp.error["message"], NOT_FINAL_ERROR)
assert(orig_tx.hash in node.getrawmempool())
else:
# orig_tx must not be in mempool
assert(orig_tx.hash not in node.getrawmempool())
return tx
test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=True)
test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
# Now mine some blocks, but make sure tx2 doesn't get mined.
# Use prioritisetransaction to lower the effective feerate to 0
self.nodes[0].prioritisetransaction(tx2.hash, -1e15, int(-self.relayfee*COIN))
cur_time = int(time.time())
for i in xrange(10):
self.nodes[0].setmocktime(cur_time + 600)
self.nodes[0].generate(1)
cur_time += 600
assert(tx2.hash in self.nodes[0].getrawmempool())
test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=True)
test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
# Mine tx2, and then try again
self.nodes[0].prioritisetransaction(tx2.hash, 1e15, int(self.relayfee*COIN))
# Advance the time on the node so that we can test timelocks
self.nodes[0].setmocktime(cur_time+600)
self.nodes[0].generate(1)
assert(tx2.hash not in self.nodes[0].getrawmempool())
# Now that tx2 is not in the mempool, a sequence locked spend should
# succeed
tx3 = test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
assert(tx3.hash in self.nodes[0].getrawmempool())
self.nodes[0].generate(1)
assert(tx3.hash not in self.nodes[0].getrawmempool())
# One more test, this time using height locks
tx4 = test_nonzero_locks(tx3, self.nodes[0], self.relayfee, use_height_lock=True)
assert(tx4.hash in self.nodes[0].getrawmempool())
# Now try combining confirmed and unconfirmed inputs
tx5 = test_nonzero_locks(tx4, self.nodes[0], self.relayfee, use_height_lock=True)
assert(tx5.hash not in self.nodes[0].getrawmempool())
utxos = self.nodes[0].listunspent()
tx5.vin.append(CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["vout"]), nSequence=1))
tx5.vout[0].nValue += int(utxos[0]["amount"]*COIN)
raw_tx5 = self.nodes[0].signrawtransaction(ToHex(tx5))["hex"]
try:
self.nodes[0].sendrawtransaction(raw_tx5)
except JSONRPCException as exp:
assert_equal(exp.error["message"], NOT_FINAL_ERROR)
else:
assert(False)
# Test mempool-BIP68 consistency after reorg
#
# State of the transactions in the last blocks:
# ... -> [ tx2 ] -> [ tx3 ]
# tip-1 tip
# And currently tx4 is in the mempool.
#
# If we invalidate the tip, tx3 should get added to the mempool, causing
# tx4 to be removed (fails sequence-lock).
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
assert(tx4.hash not in self.nodes[0].getrawmempool())
assert(tx3.hash in self.nodes[0].getrawmempool())
# Now mine 2 empty blocks to reorg out the current tip (labeled tip-1 in
# diagram above).
# This would cause tx2 to be added back to the mempool, which in turn causes
# tx3 to be removed.
tip = int(self.nodes[0].getblockhash(self.nodes[0].getblockcount()-1), 16)
height = self.nodes[0].getblockcount()
for i in xrange(2):
block = create_block(tip, create_coinbase(height), cur_time)
block.nVersion = 3
block.rehash()
block.solve()
tip = block.sha256
height += 1
self.nodes[0].submitblock(ToHex(block))
cur_time += 1
mempool = self.nodes[0].getrawmempool()
assert(tx3.hash not in mempool)
assert(tx2.hash in mempool)
# Reset the chain and get rid of the mocktimed-blocks
self.nodes[0].setmocktime(0)
self.nodes[0].invalidateblock(self.nodes[0].getblockhash(cur_height+1))
self.nodes[0].generate(10)
# Make sure that BIP68 isn't being used to validate blocks, prior to
# versionbits activation. If more blocks are mined prior to this test
# being run, then it's possible the test has activated the soft fork, and
# this test should be moved to run earlier, or deleted.
def test_bip68_not_consensus(self):
assert(get_bip9_status(self.nodes[0], 'csv')['status'] != 'active')
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)
tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
tx1.rehash()
# Make an anyone-can-spend transaction
tx2 = CTransaction()
tx2.nVersion = 1
tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
# sign tx2
tx2_raw = self.nodes[0].signrawtransaction(ToHex(tx2))["hex"]
tx2 = FromHex(tx2, tx2_raw)
tx2.rehash()
self.nodes[0].sendrawtransaction(ToHex(tx2))
# Now make an invalid spend of tx2 according to BIP68
sequence_value = 100 # 100 block relative locktime
tx3 = CTransaction()
tx3.nVersion = 2
tx3.vin = [CTxIn(COutPoint(tx2.sha256, 0), nSequence=sequence_value)]
tx3.vout = [CTxOut(int(tx2.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
tx3.rehash()
try:
self.nodes[0].sendrawtransaction(ToHex(tx3))
except JSONRPCException as exp:
assert_equal(exp.error["message"], NOT_FINAL_ERROR)
else:
assert(False)
# make a block that violates bip68; ensure that the tip updates
tip = int(self.nodes[0].getbestblockhash(), 16)
block = create_block(tip, create_coinbase(self.nodes[0].getblockcount()+1))
block.nVersion = 3
block.vtx.extend([tx1, tx2, tx3])
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.nodes[0].submitblock(ToHex(block))
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
def activateCSV(self):
# activation should happen at block height 432 (3 periods)
min_activation_height = 432
height = self.nodes[0].getblockcount()
assert(height < 432)
self.nodes[0].generate(432-height)
assert(get_bip9_status(self.nodes[0], 'csv')['status'] == 'active')
sync_blocks(self.nodes)
# Use self.nodes[1] to test standardness relay policy
def test_version2_relay(self, before_activation):
inputs = [ ]
outputs = { self.nodes[1].getnewaddress() : 1.0 }
rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
rawtxfund = self.nodes[1].fundrawtransaction(rawtx)['hex']
tx = FromHex(CTransaction(), rawtxfund)
tx.nVersion = 2
tx_signed = self.nodes[1].signrawtransaction(ToHex(tx))["hex"]
try:
tx_id = self.nodes[1].sendrawtransaction(tx_signed)
assert(before_activation == False)
except:
assert(before_activation)
if __name__ == '__main__':
BIP68Test().main()
| {
"content_hash": "4b0456de14bb2552300ad0c155559307",
"timestamp": "",
"source": "github",
"line_count": 420,
"max_line_length": 136,
"avg_line_length": 43.707142857142856,
"alnum_prop": 0.6108841313940186,
"repo_name": "BTCfork/hardfork_prototype_1_mvf-core",
"id": "9533271db7d3634c5ffd6a1e81c0eea1194cc506",
"size": "18605",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qa/rpc-tests/bip68-sequence.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "648267"
},
{
"name": "C++",
"bytes": "4335646"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "3792"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2100"
},
{
"name": "M4",
"bytes": "147383"
},
{
"name": "Makefile",
"bytes": "96737"
},
{
"name": "Objective-C",
"bytes": "3692"
},
{
"name": "Objective-C++",
"bytes": "7240"
},
{
"name": "Protocol Buffer",
"bytes": "2308"
},
{
"name": "Python",
"bytes": "761455"
},
{
"name": "QMake",
"bytes": "2020"
},
{
"name": "Shell",
"bytes": "35767"
}
],
"symlink_target": ""
} |
import sys
import json
from soma_workflow.client import Job, Workflow, Helper
# args
all_jobs_file = sys.argv[1]
method_file = sys.argv[2]
workflow_file = sys.argv[3]
with open(method_file, 'r') as f:
method_cfg = json.load(f)
# default the inner reducer when the method config does not define one
if "model.inner.reducer" not in method_cfg:
    method_cfg["model.inner.reducer"] = "None"
jobs = []
red_jobs = {}
dep_dic = {}
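# dep_dic maps an (outer fold + design) key to the mapper jobs that the
# corresponding inner-reducer job must wait for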
# read file
with open(all_jobs_file, 'r') as f:
for line in f:
cmd = line.split(" ")[:-1]
if(cmd[1].endswith("multiblox_comparison.mapper.R")):
print "multiblox_comparison.mapper"
design = cmd[-4] #cmd[2].split('_')[-2]
outer = cmd[6]
key = outer+design
if(not dep_dic.has_key(key)):
dep_dic[key] = []
cur_job = Job(command=cmd, name="Map %s outer %s inner %s" %
(design, outer, cmd[5]))
dep_dic[key].append(cur_job)
elif(cmd[1].endswith("multiblox_comparison.inner.reducer.R")):
print "multiblox_comparison.inner.reducer"
design = cmd[-4] #.split('_')[-2]
outer = cmd[5]
key = outer+design
if(not dep_dic.has_key(key)):
dep_dic[key] = []
cur_job = Job(command=cmd, name="Reduce %s outer %s" %
(design, outer))
red_jobs[key] = cur_job
elif(cmd[1].endswith("multiblox_comparison.outer.reducer.R")):
print "multiblox_comparison.outer.reducer"
cur_job = Job(command=cmd, name="Final Reduce")
final_red = cur_job
elif(cmd[1].endswith("coxnet.mapper.R")):
print "coxnet mapper"
cur_job = Job(command=cmd, name="Coxnet")
glmnet_job = cur_job
pass
else:
raise Exception("Unknown task, abort...")
jobs.append(cur_job)
dependencies = []
for k, j in red_jobs.items():
parent_list = dep_dic[k]
for p in parent_list:
dependencies.append((p, j))
dependencies.append((j, final_red))
dependencies.append((glmnet_job, final_red))
workflow = Workflow(jobs=jobs,
dependencies=dependencies)
# save the workflow into a file
Helper.serialize(workflow_file, workflow)
| {
"content_hash": "d914be3477335c204599e3151a0e051b",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 72,
"avg_line_length": 34.3134328358209,
"alnum_prop": 0.5506742061765986,
"repo_name": "vguillemot/multiblox",
"id": "218bb1bcb65078c1594da8e506d79daa99a47b35",
"size": "2299",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "istacox_method_comparison_MapRedR/create_multiblox_comparison_workflow.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6367"
},
{
"name": "R",
"bytes": "182518"
}
],
"symlink_target": ""
} |
__author__ = 'saeedamen'
from chartpy.chart import Chart
from chartpy.style import Style
from chartpy.canvas import Canvas
from chartpy.chartconstants import ChartConstants
from chartpy.twitter import Twitter
| {
"content_hash": "efc05837a2e75e6a62482d1a57c7c9da",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 49,
"avg_line_length": 30,
"alnum_prop": 0.8285714285714286,
"repo_name": "cuemacro/chartpy",
"id": "9d67d91f6844786c8bf5b84f3c6a90ee733b9574",
"size": "210",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chartpy/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "41"
},
{
"name": "CSS",
"bytes": "30258"
},
{
"name": "HTML",
"bytes": "418"
},
{
"name": "JavaScript",
"bytes": "407730"
},
{
"name": "Less",
"bytes": "378278"
},
{
"name": "Python",
"bytes": "5342035"
}
],
"symlink_target": ""
} |
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django.views.generic import TemplateView
from srd20.models import Spell, Feat, Monster
def spell_detail(request, slug):
spell = get_object_or_404(Spell, altname=slug)
return render_to_response('browse/spell.html',
{
'spell': spell,
'editable': request.user.has_perm('srd20.change_spell'),
},
context_instance=RequestContext(request)
)
def feat_detail(request, slug):
feat = get_object_or_404(Feat, altname=slug)
return render_to_response('browse/feat.html',
{
'feat': feat,
'editable': request.user.has_perm('srd20.change_feat'),
},
context_instance=RequestContext(request)
)
def monster_detail(request, slug):
monster = get_object_or_404(Monster, altname=slug)
return render_to_response('browse/monster.html',
{
'monster': monster,
'editable': request.user.has_perm('srd20.change_monster'),
},
context_instance=RequestContext(request)
)
class Favorites(TemplateView):
template_name = "browse/favorites.html"
favorites = Favorites.as_view()
| {
"content_hash": "70a0cedfdf61696d6833288ae1c89a09",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 70,
"avg_line_length": 30.609756097560975,
"alnum_prop": 0.6478087649402391,
"repo_name": "machinalis/django-srd20",
"id": "97cafda9e1bad2f968cb1814f921d78e104c56ad",
"size": "1255",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "browse/views.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "1458"
},
{
"name": "Python",
"bytes": "225560"
}
],
"symlink_target": ""
} |
""" Unittests for nodes.preprocessing """
| {
"content_hash": "9240f41fb72a9ec1c90a7da1e216925d",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 41,
"avg_line_length": 42,
"alnum_prop": 0.7142857142857143,
"repo_name": "pyspace/pyspace",
"id": "beade6313c0b323e9dd556d3fcb18784ee3825cd",
"size": "42",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "pySPACE/tests/unittests/nodes/preprocessing/__init__.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "11128"
},
{
"name": "C++",
"bytes": "309606"
},
{
"name": "Matlab",
"bytes": "3768"
},
{
"name": "Python",
"bytes": "3160853"
},
{
"name": "QMake",
"bytes": "3217"
},
{
"name": "Shell",
"bytes": "253"
}
],
"symlink_target": ""
} |
from .base import * # NOQA
import logging.config
# For security and performance reasons, DEBUG is turned off
DEBUG = False
# Must mention ALLOWED_HOSTS in production!
ALLOWED_HOSTS = ['pycontw.krdai.info', 'tw.pycon.org']
# Override static and media URL for prefix in WSGI server.
# https://code.djangoproject.com/ticket/25598
STATIC_URL = '/2016/static/'
MEDIA_URL = '/2016/media/'
# Cache the templates in memory for speed-up
loaders = [
(
'django.template.loaders.cached.Loader',
[
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]
),
]
TEMPLATES[0]['OPTIONS'].update({"loaders": loaders})
TEMPLATES[0]['OPTIONS'].update({"debug": False})
del TEMPLATES[0]['APP_DIRS']
# Explicitly tell Django where to find translations.
LOCALE_PATHS = [join(BASE_DIR, 'locale')]
# Log everything to the logs directory at the top
LOGFILE_ROOT = join(dirname(BASE_DIR), 'logs')
# Reset logging
LOGGING_CONFIG = None
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': (
'[%(asctime)s] %(levelname)s '
'[%(pathname)s:%(lineno)s] %(message)s'
),
'datefmt': "%d/%b/%Y %H:%M:%S"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'proj_log_file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': join(LOGFILE_ROOT, 'project.log'),
'formatter': 'verbose'
},
'django_log_file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': join(LOGFILE_ROOT, 'django.log'),
'formatter': 'verbose'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
}
},
'loggers': {
'django': {
'handlers': ['django_log_file'],
'propagate': True,
'level': 'DEBUG',
},
'project': {
'handlers': ['proj_log_file'],
'level': 'DEBUG',
},
}
}
logging.config.dictConfig(LOGGING)
EMAIL_BACKEND = env.email_url()['EMAIL_BACKEND']
EMAIL_HOST = env.email_url()['EMAIL_HOST']
EMAIL_HOST_PASSWORD = env.email_url()['EMAIL_HOST_PASSWORD']
EMAIL_HOST_USER = env.email_url()['EMAIL_HOST_USER']
EMAIL_PORT = env.email_url()['EMAIL_PORT']
EMAIL_USE_TLS = env.email_url()['EMAIL_USE_TLS']
DEFAULT_FROM_EMAIL = SERVER_EMAIL = '{name} <{addr}>'.format(
name='PyCon Taiwan',
addr=EMAIL_HOST_USER,
)
# Security-related settings
SECURE_HSTS_SECONDS = 2592000
SECURE_BROWSER_XSS_FILTER = True
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
CSRF_COOKIE_HTTPONLY = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
X_FRAME_OPTIONS = 'DENY'
# Settings for Sentry
INSTALLED_APPS += (
'raven.contrib.django.raven_compat',
)
import raven # noqa
RAVEN_CONFIG = {
'dsn': env('DSN_URL'),
}
GA_TRACK_ID = env('GA_TRACK_ID', default=None)
| {
"content_hash": "aeac6a615d85fa6ce4b3fc6e446f290e",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 61,
"avg_line_length": 26.297520661157026,
"alnum_prop": 0.5760527969830296,
"repo_name": "uranusjr/pycontw2016",
"id": "cab2eb48661be050020292961ae95aa0d5e4476b",
"size": "3294",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/pycontw2016/settings/production.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "59719"
},
{
"name": "HTML",
"bytes": "141751"
},
{
"name": "JavaScript",
"bytes": "4475"
},
{
"name": "Python",
"bytes": "229546"
},
{
"name": "Shell",
"bytes": "389"
}
],
"symlink_target": ""
} |
import sys
import os
import logging
import numpy as np
from click.testing import CliRunner
import rasterio
from rasterio.rio.main import main_group
from rasterio.rio.convert import convert
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
TEST_BBOX = [-11850000, 4804000, -11840000, 4808000]
def test_clip_bounds(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group,
['clip', 'tests/data/shade.tif', output, '--bounds'] + TEST_BBOX)
assert result.exit_code == 0
assert os.path.exists(output)
with rasterio.open(output) as out:
assert out.shape == (420, 173)
def test_clip_like(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group, [
'clip', 'tests/data/shade.tif', output, '--like',
'tests/data/shade.tif'])
assert result.exit_code == 0
assert os.path.exists(output)
with rasterio.open('tests/data/shade.tif') as template_ds:
with rasterio.open(output) as out:
assert out.shape == template_ds.shape
assert np.allclose(out.bounds, template_ds.bounds)
def test_clip_missing_params(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group, ['clip', 'tests/data/shade.tif', output])
assert result.exit_code == 2
assert '--bounds or --like required' in result.output
def test_clip_bounds_disjunct(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group,
['clip', 'tests/data/shade.tif', output, '--bounds'] + [0, 0, 10, 10])
assert result.exit_code == 2
assert '--bounds' in result.output
def test_clip_like_disjunct(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group, [
'clip', 'tests/data/shade.tif', output, '--like',
'tests/data/RGB.byte.tif'])
assert result.exit_code == 2
assert '--like' in result.output
# Tests: format and type conversion, --format and --dtype
def test_format(tmpdir):
outputname = str(tmpdir.join('test.jpg'))
runner = CliRunner()
result = runner.invoke(
convert,
['tests/data/RGB.byte.tif', outputname, '--format', 'JPEG'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.driver == 'JPEG'
def test_format_short(tmpdir):
outputname = str(tmpdir.join('test.jpg'))
runner = CliRunner()
result = runner.invoke(
convert,
['tests/data/RGB.byte.tif', outputname, '-f', 'JPEG'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.driver == 'JPEG'
def test_output_opt(tmpdir):
outputname = str(tmpdir.join('test.jpg'))
runner = CliRunner()
result = runner.invoke(
convert,
['tests/data/RGB.byte.tif', '-o', outputname, '-f', 'JPEG'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.driver == 'JPEG'
def test_dtype(tmpdir):
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(
convert,
['tests/data/RGB.byte.tif', outputname, '--dtype', 'uint16'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.dtypes == tuple(['uint16'] * 3)
def test_dtype_rescaling_uint8_full(tmpdir):
"""Rescale uint8 [0, 255] to uint8 [0, 255]"""
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(
convert,
['tests/data/RGB.byte.tif', outputname, '--scale-ratio', '1.0'])
assert result.exit_code == 0
src_stats = [
{"max": 255.0, "mean": 44.434478650699106, "min": 1.0},
{"max": 255.0, "mean": 66.02203484105824, "min": 1.0},
{"max": 255.0, "mean": 71.39316199120559, "min": 1.0}]
with rasterio.open(outputname) as src:
for band, expected in zip(src.read(masked=True), src_stats):
assert round(band.min() - expected['min'], 6) == 0.0
assert round(band.max() - expected['max'], 6) == 0.0
assert round(band.mean() - expected['mean'], 6) == 0.0
def test_dtype_rescaling_uint8_half(tmpdir):
"""Rescale uint8 [0, 255] to uint8 [0, 127]"""
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(convert, [
'tests/data/RGB.byte.tif', outputname, '--scale-ratio', '0.5'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
for band in src.read():
assert round(band.min() - 0, 6) == 0.0
assert round(band.max() - 127, 6) == 0.0
def test_dtype_rescaling_uint16(tmpdir):
"""Rescale uint8 [0, 255] to uint16 [0, 4095]"""
# NB: 255 * 16 is 4080, we don't actually get to 4095.
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(convert, [
'tests/data/RGB.byte.tif', outputname, '--dtype', 'uint16',
'--scale-ratio', '16'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
for band in src.read():
assert round(band.min() - 0, 6) == 0.0
assert round(band.max() - 4080, 6) == 0.0
def test_dtype_rescaling_float64(tmpdir):
"""Rescale uint8 [0, 255] to float64 [-1, 1]"""
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(convert, [
'tests/data/RGB.byte.tif', outputname, '--dtype', 'float64',
'--scale-ratio', str(2.0 / 255), '--scale-offset', '-1.0'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
for band in src.read():
assert round(band.min() + 1.0, 6) == 0.0
assert round(band.max() - 1.0, 6) == 0.0
def test_rgb(tmpdir):
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(
convert,
['tests/data/RGB.byte.tif', outputname, '--rgb'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.colorinterp(1) == rasterio.enums.ColorInterp.red
| {
"content_hash": "705220cb20e1275a833ebff7c468117c",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 78,
"avg_line_length": 33.101063829787236,
"alnum_prop": 0.6090310139803953,
"repo_name": "kapadia/rasterio",
"id": "b2446a5f62c4533ec2de7b08cc0f7d0a085aba64",
"size": "6223",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_rio_convert.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "34752"
},
{
"name": "Python",
"bytes": "457334"
},
{
"name": "Shell",
"bytes": "742"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals, print_function, division
__author__ = "mozman <mozman@gmx.at>"
import unittest
from ezodf2.xmlns import CN, etree
from ezodf2.tableutils import get_table_rows, get_min_max_cell_count
# objects to test
from ezodf2.tablenormalizer import normalize_table
def get_nrows_ncols(table):
rows = get_table_rows(table)
nrows = len(rows)
ncols = len(rows[0])
return nrows, ncols
class TestInitTable(unittest.TestCase):
def test_init_node_error(self):
with self.assertRaises(ValueError):
normalize_table(xmlnode=etree.Element(CN('error')))
TABLE_5x3 = """
<table:table xmlns:table="urn:oasis:names:tc:opendocument:xmlns:table:1.0">
<table:table-header-rows>
<table:table-row><table:table-cell /><table:table-cell /><table:table-cell /></table:table-row>
<table:table-row><table:table-cell /><table:table-cell /><table:table-cell /></table:table-row>
</table:table-header-rows>
<table:table-rows>
<table:table-row><table:table-cell /><table:table-cell /><table:table-cell /></table:table-row>
<table:table-row><table:table-cell /><table:table-cell /><table:table-cell /></table:table-row>
<table:table-row><table:table-cell /><table:table-cell /><table:table-cell /></table:table-row>
</table:table-rows>
</table:table>
"""
class TestUncompressedTable(unittest.TestCase):
def test_init_node_error(self):
with self.assertRaises(ValueError):
normalize_table(xmlnode=etree.Element(CN('error')))
def test_uncompressed_content(self):
table = etree.XML(TABLE_5x3)
normalize_table(table)
nrows, ncols = get_nrows_ncols(table)
self.assertEqual(5, nrows)
self.assertEqual(3, ncols)
TABLE_REP_7x7_EXPAND_ALL= """
<table:table xmlns:table="urn:oasis:names:tc:opendocument:xmlns:table:1.0">
<table:table-header-rows>
<table:table-row><table:table-cell table:number-columns-repeated="7"/></table:table-row>
</table:table-header-rows>
<table:table-rows>
<table:table-row table:number-rows-repeated="6"><table:table-cell table:number-columns-repeated="7" /></table:table-row>
</table:table-rows>
</table:table>
"""
class TestExpandAll(unittest.TestCase):
def test_expand_content(self):
table = etree.XML(TABLE_REP_7x7_EXPAND_ALL)
normalize_table(table, expand='all')
nrows, ncols = get_nrows_ncols(table)
self.assertEqual(7, nrows)
self.assertEqual(7, ncols)
TABLE_REP_7x7_ALL_BUT_LAST = """
<table:table xmlns:table="urn:oasis:names:tc:opendocument:xmlns:table:1.0">
<table:table-header-rows>
<table:table-row><table:table-cell table:number-columns-repeated="6"/><table:table-cell /></table:table-row>
</table:table-header-rows>
<table:table-rows>
<table:table-row table:number-rows-repeated="5"><table:table-cell table:number-columns-repeated="6" /><table:table-cell /></table:table-row>
<table:table-row><table:table-cell table:number-columns-repeated="6"/><table:table-cell /></table:table-row>
</table:table-rows>
</table:table>
"""
# Last row is repeated only once, repetition attribute of last row/col is ignored
TABLE_REP_7x7_ALL_BUT_LAST_2 = """
<table:table xmlns:table="urn:oasis:names:tc:opendocument:xmlns:table:1.0">
<table:table-header-rows>
<table:table-row><table:table-cell table:number-columns-repeated="6"/><table:table-cell table:number-columns-repeated="99999"/></table:table-row>
</table:table-header-rows>
<table:table-rows>
<table:table-row table:number-rows-repeated="5"><table:table-cell table:number-columns-repeated="6" /><table:table-cell /></table:table-row>
<table:table-row table:number-rows-repeated="99999"><table:table-cell table:number-columns-repeated="6"/><table:table-cell /></table:table-row>
</table:table-rows>
</table:table>
"""
class TestExpandAllButLast(unittest.TestCase):
def test_expand_content(self):
table = etree.XML(TABLE_REP_7x7_ALL_BUT_LAST)
normalize_table(table, expand="all_but_last")
nrows, ncols = get_nrows_ncols(table)
self.assertEqual(7, nrows)
self.assertEqual(7, ncols)
def test_expand_content_ignore_last_repetition(self):
# Last row is repeated only once, repetition attribute of last row/col is ignored
table = etree.XML(TABLE_REP_7x7_ALL_BUT_LAST_2)
normalize_table(table, expand="all_but_last")
nrows, ncols = get_nrows_ncols(table)
self.assertEqual(7, nrows)
self.assertEqual(7, ncols)
TABLE_REP_7x11_ALL_LESS_MAXCOUNT = """
<table:table xmlns:table="urn:oasis:names:tc:opendocument:xmlns:table:1.0">
<table:table-header-rows>
<table:table-row><table:table-cell table:number-columns-repeated="6"/><table:table-cell table:number-columns-repeated="999"/></table:table-row>
</table:table-header-rows>
<table:table-rows>
<table:table-row table:number-rows-repeated="5"><table:table-cell table:number-columns-repeated="7" /></table:table-row>
<table:table-row table:number-rows-repeated="5"><table:table-cell table:number-columns-repeated="7"/></table:table-row>
</table:table-rows>
</table:table>
"""
class TestExpandAllLessMaxcount(unittest.TestCase):
def test_expand_content(self):
table = etree.XML(TABLE_REP_7x11_ALL_LESS_MAXCOUNT)
normalize_table(table, expand="all_less_maxcount", maxcount=(32, 32))
nrows, ncols = get_nrows_ncols(table)
self.assertEqual(11, nrows)
self.assertEqual(7, ncols)
UNALIGNED_TABLE_3_2_1 = """
<table:table xmlns:table="urn:oasis:names:tc:opendocument:xmlns:table:1.0">
<table:table-header-rows>
<table:table-row><table:table-cell /><table:table-cell /><table:table-cell /></table:table-row>
</table:table-header-rows>
<table:table-rows>
<table:table-row><table:table-cell /><table:table-cell /></table:table-row>
<table:table-row><table:table-cell /></table:table-row>
</table:table-rows>
</table:table>
"""
class TestAlignTableRows(unittest.TestCase):
def setUp(self):
self.table = etree.XML(UNALIGNED_TABLE_3_2_1)
def test_min_max_cell_count(self):
cmin, cmax = get_min_max_cell_count(self.table)
self.assertEqual(1, cmin, 'expected min cols == 1')
self.assertEqual(3, cmax, 'expected max cols == 3')
def test_align_table_rows(self):
normalize_table(self.table)
cmin, cmax = get_min_max_cell_count(self.table)
self.assertEqual(3, cmin, "table contains rows with cell-count < 3.")
if __name__=='__main__':
unittest.main()
| {
"content_hash": "b0e92b1ea18097809ce0a5a8a75d1b5d",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 147,
"avg_line_length": 40.67484662576687,
"alnum_prop": 0.6808446455505279,
"repo_name": "iwschris/ezodf2",
"id": "7b9b6d5f0ca9cf99e3e364e22c1f3fdc0172454b",
"size": "6781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_table_normalizer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "351944"
},
{
"name": "Shell",
"bytes": "4505"
}
],
"symlink_target": ""
} |
""" support for skip/xfail functions and markers. """
import os
import sys
import traceback
import py
import pytest
from _pytest.mark import MarkInfo, MarkDecorator
def pytest_addoption(parser):
group = parser.getgroup("general")
group.addoption('--runxfail',
action="store_true", dest="runxfail", default=False,
help="run tests even if they are marked xfail")
parser.addini("xfail_strict", "default for the strict parameter of xfail "
"markers when not given explicitly (default: "
"False)",
default=False,
type="bool")
def pytest_configure(config):
if config.option.runxfail:
old = pytest.xfail
config._cleanup.append(lambda: setattr(pytest, "xfail", old))
def nop(*args, **kwargs):
pass
nop.Exception = XFailed
setattr(pytest, "xfail", nop)
config.addinivalue_line("markers",
"skip(reason=None): skip the given test function with an optional reason. "
"Example: skip(reason=\"no way of currently testing this\") skips the "
"test."
)
config.addinivalue_line("markers",
"skipif(condition): skip the given test function if eval(condition) "
"results in a True value. Evaluation happens within the "
"module global context. Example: skipif('sys.platform == \"win32\"') "
"skips the test if we are on the win32 platform. see "
"http://pytest.org/latest/skipping.html"
)
config.addinivalue_line("markers",
"xfail(condition, reason=None, run=True, raises=None, strict=False): "
"mark the test function as an expected failure if eval(condition) "
"has a True value. Optionally specify a reason for better reporting "
"and run=False if you don't even want to execute the test function. "
"If only specific exception(s) are expected, you can list them in "
"raises, and if the test fails in other ways, it will be reported as "
"a true failure. See http://pytest.org/latest/skipping.html"
)
def pytest_namespace():
return dict(xfail=xfail)
class XFailed(pytest.fail.Exception):
""" raised from an explicit call to pytest.xfail() """
def xfail(reason=""):
""" xfail an executing test or setup functions with the given reason."""
__tracebackhide__ = True
raise XFailed(reason)
xfail.Exception = XFailed
class MarkEvaluator:
def __init__(self, item, name):
self.item = item
self.name = name
@property
def holder(self):
return self.item.keywords.get(self.name)
def __bool__(self):
return bool(self.holder)
__nonzero__ = __bool__
def wasvalid(self):
return not hasattr(self, 'exc')
def invalidraise(self, exc):
raises = self.get('raises')
if not raises:
return
return not isinstance(exc, raises)
def istrue(self):
try:
return self._istrue()
except Exception:
self.exc = sys.exc_info()
if isinstance(self.exc[1], SyntaxError):
msg = [" " * (self.exc[1].offset + 4) + "^",]
msg.append("SyntaxError: invalid syntax")
else:
msg = traceback.format_exception_only(*self.exc[:2])
pytest.fail("Error evaluating %r expression\n"
" %s\n"
"%s"
%(self.name, self.expr, "\n".join(msg)),
pytrace=False)
def _getglobals(self):
d = {'os': os, 'sys': sys, 'config': self.item.config}
d.update(self.item.obj.__globals__)
return d
def _istrue(self):
if hasattr(self, 'result'):
return self.result
if self.holder:
d = self._getglobals()
if self.holder.args or 'condition' in self.holder.kwargs:
self.result = False
# "holder" might be a MarkInfo or a MarkDecorator; only
# MarkInfo keeps track of all parameters it received in an
# _arglist attribute
if hasattr(self.holder, '_arglist'):
arglist = self.holder._arglist
else:
arglist = [(self.holder.args, self.holder.kwargs)]
for args, kwargs in arglist:
if 'condition' in kwargs:
args = (kwargs['condition'],)
for expr in args:
self.expr = expr
if isinstance(expr, py.builtin._basestring):
result = cached_eval(self.item.config, expr, d)
else:
if "reason" not in kwargs:
# XXX better be checked at collection time
msg = "you need to specify reason=STRING " \
"when using booleans as conditions."
pytest.fail(msg)
result = bool(expr)
if result:
self.result = True
self.reason = kwargs.get('reason', None)
self.expr = expr
return self.result
else:
self.result = True
return getattr(self, 'result', False)
def get(self, attr, default=None):
return self.holder.kwargs.get(attr, default)
def getexplanation(self):
expl = getattr(self, 'reason', None) or self.get('reason', None)
if not expl:
if not hasattr(self, 'expr'):
return ""
else:
return "condition: " + str(self.expr)
return expl
@pytest.hookimpl(tryfirst=True)
def pytest_runtest_setup(item):
# Check if skip or skipif are specified as pytest marks
skipif_info = item.keywords.get('skipif')
if isinstance(skipif_info, (MarkInfo, MarkDecorator)):
eval_skipif = MarkEvaluator(item, 'skipif')
if eval_skipif.istrue():
item._evalskip = eval_skipif
pytest.skip(eval_skipif.getexplanation())
skip_info = item.keywords.get('skip')
if isinstance(skip_info, (MarkInfo, MarkDecorator)):
item._evalskip = True
if 'reason' in skip_info.kwargs:
pytest.skip(skip_info.kwargs['reason'])
elif skip_info.args:
pytest.skip(skip_info.args[0])
else:
pytest.skip("unconditional skip")
item._evalxfail = MarkEvaluator(item, 'xfail')
check_xfail_no_run(item)
@pytest.mark.hookwrapper
def pytest_pyfunc_call(pyfuncitem):
check_xfail_no_run(pyfuncitem)
outcome = yield
passed = outcome.excinfo is None
if passed:
check_strict_xfail(pyfuncitem)
def check_xfail_no_run(item):
"""check xfail(run=False)"""
if not item.config.option.runxfail:
evalxfail = item._evalxfail
if evalxfail.istrue():
if not evalxfail.get('run', True):
pytest.xfail("[NOTRUN] " + evalxfail.getexplanation())
def check_strict_xfail(pyfuncitem):
"""check xfail(strict=True) for the given PASSING test"""
evalxfail = pyfuncitem._evalxfail
if evalxfail.istrue():
strict_default = pyfuncitem.config.getini('xfail_strict')
is_strict_xfail = evalxfail.get('strict', strict_default)
if is_strict_xfail:
del pyfuncitem._evalxfail
explanation = evalxfail.getexplanation()
pytest.fail('[XPASS(strict)] ' + explanation, pytrace=False)
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
outcome = yield
rep = outcome.get_result()
evalxfail = getattr(item, '_evalxfail', None)
evalskip = getattr(item, '_evalskip', None)
    # unittest special case, see setting of _unexpectedsuccess
if hasattr(item, '_unexpectedsuccess') and rep.when == "call":
from _pytest.compat import _is_unittest_unexpected_success_a_failure
if item._unexpectedsuccess:
rep.longrepr = "Unexpected success: {0}".format(item._unexpectedsuccess)
else:
rep.longrepr = "Unexpected success"
if _is_unittest_unexpected_success_a_failure():
rep.outcome = "failed"
else:
rep.outcome = "passed"
rep.wasxfail = rep.longrepr
elif item.config.option.runxfail:
        pass  # don't interfere
elif call.excinfo and call.excinfo.errisinstance(pytest.xfail.Exception):
rep.wasxfail = "reason: " + call.excinfo.value.msg
rep.outcome = "skipped"
elif evalxfail and not rep.skipped and evalxfail.wasvalid() and \
evalxfail.istrue():
if call.excinfo:
if evalxfail.invalidraise(call.excinfo.value):
rep.outcome = "failed"
else:
rep.outcome = "skipped"
rep.wasxfail = evalxfail.getexplanation()
elif call.when == "call":
strict_default = item.config.getini('xfail_strict')
is_strict_xfail = evalxfail.get('strict', strict_default)
explanation = evalxfail.getexplanation()
if is_strict_xfail:
rep.outcome = "failed"
rep.longrepr = "[XPASS(strict)] {0}".format(explanation)
else:
rep.outcome = "passed"
rep.wasxfail = explanation
elif evalskip is not None and rep.skipped and type(rep.longrepr) is tuple:
# skipped by mark.skipif; change the location of the failure
# to point to the item definition, otherwise it will display
# the location of where the skip exception was raised within pytest
filename, line, reason = rep.longrepr
filename, line = item.location[:2]
rep.longrepr = filename, line, reason
# called by terminalreporter progress reporting
def pytest_report_teststatus(report):
if hasattr(report, "wasxfail"):
if report.skipped:
return "xfailed", "x", "xfail"
elif report.passed:
return "xpassed", "X", ("XPASS", {'yellow': True})
# called by the terminalreporter instance/plugin
def pytest_terminal_summary(terminalreporter):
tr = terminalreporter
if not tr.reportchars:
#for name in "xfailed skipped failed xpassed":
# if not tr.stats.get(name, 0):
# tr.write_line("HINT: use '-r' option to see extra "
# "summary info about tests")
# break
return
lines = []
for char in tr.reportchars:
if char == "x":
show_xfailed(terminalreporter, lines)
elif char == "X":
show_xpassed(terminalreporter, lines)
elif char in "fF":
show_simple(terminalreporter, lines, 'failed', "FAIL %s")
elif char in "sS":
show_skipped(terminalreporter, lines)
elif char == "E":
show_simple(terminalreporter, lines, 'error', "ERROR %s")
elif char == 'p':
show_simple(terminalreporter, lines, 'passed', "PASSED %s")
if lines:
tr._tw.sep("=", "short test summary info")
for line in lines:
tr._tw.line(line)
def show_simple(terminalreporter, lines, stat, format):
failed = terminalreporter.stats.get(stat)
if failed:
for rep in failed:
pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
lines.append(format %(pos,))
def show_xfailed(terminalreporter, lines):
xfailed = terminalreporter.stats.get("xfailed")
if xfailed:
for rep in xfailed:
pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
reason = rep.wasxfail
lines.append("XFAIL %s" % (pos,))
if reason:
lines.append(" " + str(reason))
def show_xpassed(terminalreporter, lines):
xpassed = terminalreporter.stats.get("xpassed")
if xpassed:
for rep in xpassed:
pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
reason = rep.wasxfail
lines.append("XPASS %s %s" %(pos, reason))
def cached_eval(config, expr, d):
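    # memoize evaluated condition strings on the config object so identical
    # skipif/xfail expressions are only evaluated once per test run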
if not hasattr(config, '_evalcache'):
config._evalcache = {}
try:
return config._evalcache[expr]
except KeyError:
import _pytest._code
exprcode = _pytest._code.compile(expr, mode="eval")
config._evalcache[expr] = x = eval(exprcode, d)
return x
def folded_skips(skipped):
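    # group skip reports sharing the same (fspath, lineno, reason) so the
    # summary prints each distinct skip location once with a count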
d = {}
for event in skipped:
key = event.longrepr
assert len(key) == 3, (event, key)
d.setdefault(key, []).append(event)
l = []
for key, events in d.items():
l.append((len(events),) + key)
return l
def show_skipped(terminalreporter, lines):
tr = terminalreporter
skipped = tr.stats.get('skipped', [])
if skipped:
#if not tr.hasopt('skipped'):
# tr.write_line(
# "%d skipped tests, specify -rs for more info" %
# len(skipped))
# return
fskips = folded_skips(skipped)
if fskips:
#tr.write_sep("_", "skipped test summary")
for num, fspath, lineno, reason in fskips:
if reason.startswith("Skipped: "):
reason = reason[9:]
lines.append("SKIP [%d] %s:%d: %s" %
(num, fspath, lineno, reason))
| {
"content_hash": "b8192ba77f66012ad79d4d6b206b10c7",
"timestamp": "",
"source": "github",
"line_count": 375,
"max_line_length": 84,
"avg_line_length": 36.312,
"alnum_prop": 0.571271205111258,
"repo_name": "vicky2135/lucious",
"id": "91a34169f620ebd8d1aa2ca2cfa4be330c970aee",
"size": "13617",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "oscar/lib/python2.7/site-packages/_pytest/skipping.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "896683"
},
{
"name": "C++",
"bytes": "52230"
},
{
"name": "CSS",
"bytes": "1169533"
},
{
"name": "HTML",
"bytes": "1104983"
},
{
"name": "JavaScript",
"bytes": "1055140"
},
{
"name": "Makefile",
"bytes": "145238"
},
{
"name": "Python",
"bytes": "55993261"
},
{
"name": "Shell",
"bytes": "40487"
}
],
"symlink_target": ""
} |
from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._virtual_machine_scale_set_vm_extensions_operations import (
build_create_or_update_request,
build_delete_request,
build_get_request,
build_list_request,
build_update_request,
)
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualMachineScaleSetVMExtensionsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.compute.v2020_06_01.aio.ComputeManagementClient`'s
:attr:`virtual_machine_scale_set_vm_extensions` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
async def _create_or_update_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
vm_extension_name: str,
extension_parameters: Union[_models.VirtualMachineScaleSetVMExtension, IO],
**kwargs: Any
) -> _models.VirtualMachineScaleSetVMExtension:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualMachineScaleSetVMExtension]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(extension_parameters, (IO, bytes)):
_content = extension_parameters
else:
_json = self._serialize.body(extension_parameters, "VirtualMachineScaleSetVMExtension")
request = build_create_or_update_request(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
vm_extension_name=vm_extension_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("VirtualMachineScaleSetVMExtension", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("VirtualMachineScaleSetVMExtension", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualMachines/{instanceId}/extensions/{vmExtensionName}"} # type: ignore
@overload
async def begin_create_or_update(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
vm_extension_name: str,
extension_parameters: _models.VirtualMachineScaleSetVMExtension,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.VirtualMachineScaleSetVMExtension]:
"""The operation to create or update the VMSS VM extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set. Required.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine. Required.
:type instance_id: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Create Virtual Machine Extension
operation. Required.
:type extension_parameters:
~azure.mgmt.compute.v2020_06_01.models.VirtualMachineScaleSetVMExtension
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualMachineScaleSetVMExtension or
the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2020_06_01.models.VirtualMachineScaleSetVMExtension]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_create_or_update(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
vm_extension_name: str,
extension_parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.VirtualMachineScaleSetVMExtension]:
"""The operation to create or update the VMSS VM extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set. Required.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine. Required.
:type instance_id: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Create Virtual Machine Extension
operation. Required.
:type extension_parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualMachineScaleSetVMExtension or
the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2020_06_01.models.VirtualMachineScaleSetVMExtension]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
vm_extension_name: str,
extension_parameters: Union[_models.VirtualMachineScaleSetVMExtension, IO],
**kwargs: Any
) -> AsyncLROPoller[_models.VirtualMachineScaleSetVMExtension]:
"""The operation to create or update the VMSS VM extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set. Required.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine. Required.
:type instance_id: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Create Virtual Machine Extension
        operation. Is either a model type or an IO type. Required.
:type extension_parameters:
~azure.mgmt.compute.v2020_06_01.models.VirtualMachineScaleSetVMExtension or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualMachineScaleSetVMExtension or
the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2020_06_01.models.VirtualMachineScaleSetVMExtension]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualMachineScaleSetVMExtension]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial( # type: ignore
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
vm_extension_name=vm_extension_name,
extension_parameters=extension_parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("VirtualMachineScaleSetVMExtension", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualMachines/{instanceId}/extensions/{vmExtensionName}"} # type: ignore
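    # Hedged usage sketch for begin_create_or_update (comments only, placeholder
    # names): the method returns an AsyncLROPoller, and awaiting poller.result()
    # yields the created or updated VirtualMachineScaleSetVMExtension. The
    # ``extension_parameters`` variable is assumed to be a
    # _models.VirtualMachineScaleSetVMExtension built by the caller.
    #
    #   poller = await client.virtual_machine_scale_set_vm_extensions.begin_create_or_update(
    #       resource_group_name="<resource-group>",
    #       vm_scale_set_name="<vmss-name>",
    #       instance_id="<instance-id>",
    #       vm_extension_name="<extension-name>",
    #       extension_parameters=extension_parameters,
    #   )
    #   extension = await poller.result()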
async def _update_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
vm_extension_name: str,
extension_parameters: Union[_models.VirtualMachineScaleSetVMExtensionUpdate, IO],
**kwargs: Any
) -> _models.VirtualMachineScaleSetVMExtension:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualMachineScaleSetVMExtension]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(extension_parameters, (IO, bytes)):
_content = extension_parameters
else:
_json = self._serialize.body(extension_parameters, "VirtualMachineScaleSetVMExtensionUpdate")
request = build_update_request(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
vm_extension_name=vm_extension_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("VirtualMachineScaleSetVMExtension", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualMachines/{instanceId}/extensions/{vmExtensionName}"} # type: ignore
@overload
async def begin_update(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
vm_extension_name: str,
extension_parameters: _models.VirtualMachineScaleSetVMExtensionUpdate,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.VirtualMachineScaleSetVMExtension]:
"""The operation to update the VMSS VM extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set. Required.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine. Required.
:type instance_id: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Update Virtual Machine Extension
operation. Required.
:type extension_parameters:
~azure.mgmt.compute.v2020_06_01.models.VirtualMachineScaleSetVMExtensionUpdate
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualMachineScaleSetVMExtension or
the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2020_06_01.models.VirtualMachineScaleSetVMExtension]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_update(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
vm_extension_name: str,
extension_parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.VirtualMachineScaleSetVMExtension]:
"""The operation to update the VMSS VM extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set. Required.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine. Required.
:type instance_id: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Update Virtual Machine Extension
operation. Required.
:type extension_parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualMachineScaleSetVMExtension or
the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2020_06_01.models.VirtualMachineScaleSetVMExtension]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_update(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
vm_extension_name: str,
extension_parameters: Union[_models.VirtualMachineScaleSetVMExtensionUpdate, IO],
**kwargs: Any
) -> AsyncLROPoller[_models.VirtualMachineScaleSetVMExtension]:
"""The operation to update the VMSS VM extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set. Required.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine. Required.
:type instance_id: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Update Virtual Machine Extension
        operation. Is either a model type or an IO type. Required.
:type extension_parameters:
~azure.mgmt.compute.v2020_06_01.models.VirtualMachineScaleSetVMExtensionUpdate or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualMachineScaleSetVMExtension or
the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2020_06_01.models.VirtualMachineScaleSetVMExtension]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-06-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualMachineScaleSetVMExtension]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_initial( # type: ignore
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
vm_extension_name=vm_extension_name,
extension_parameters=extension_parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("VirtualMachineScaleSetVMExtension", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualMachines/{instanceId}/extensions/{vmExtensionName}"} # type: ignore
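    # Hedged usage sketch for begin_update (comments only, placeholder names):
    # same long-running pattern as begin_create_or_update, but the body is a
    # _models.VirtualMachineScaleSetVMExtensionUpdate (assumed to be built by the
    # caller) carrying only the properties to change.
    #
    #   poller = await client.virtual_machine_scale_set_vm_extensions.begin_update(
    #       resource_group_name="<resource-group>",
    #       vm_scale_set_name="<vmss-name>",
    #       instance_id="<instance-id>",
    #       vm_extension_name="<extension-name>",
    #       extension_parameters=extension_update_parameters,
    #   )
    #   extension = await poller.result()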
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, vm_scale_set_name: str, instance_id: str, vm_extension_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_delete_request(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
vm_extension_name=vm_extension_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualMachines/{instanceId}/extensions/{vmExtensionName}"} # type: ignore
@distributed_trace_async
async def begin_delete(
self, resource_group_name: str, vm_scale_set_name: str, instance_id: str, vm_extension_name: str, **kwargs: Any
) -> AsyncLROPoller[None]:
"""The operation to delete the VMSS VM extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set. Required.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine. Required.
:type instance_id: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
vm_extension_name=vm_extension_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualMachines/{instanceId}/extensions/{vmExtensionName}"} # type: ignore
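    # Hedged usage sketch for begin_delete (comments only, placeholder names):
    # the poller completes with None once the extension has been removed.
    #
    #   poller = await client.virtual_machine_scale_set_vm_extensions.begin_delete(
    #       resource_group_name="<resource-group>",
    #       vm_scale_set_name="<vmss-name>",
    #       instance_id="<instance-id>",
    #       vm_extension_name="<extension-name>",
    #   )
    #   await poller.result()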
@distributed_trace_async
async def get(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
vm_extension_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> _models.VirtualMachineScaleSetVMExtension:
"""The operation to get the VMSS VM extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set. Required.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine. Required.
:type instance_id: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param expand: The expand expression to apply on the operation. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineScaleSetVMExtension or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2020_06_01.models.VirtualMachineScaleSetVMExtension
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualMachineScaleSetVMExtension]
request = build_get_request(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
vm_extension_name=vm_extension_name,
subscription_id=self._config.subscription_id,
expand=expand,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("VirtualMachineScaleSetVMExtension", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualMachines/{instanceId}/extensions/{vmExtensionName}"} # type: ignore
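    # Hedged usage sketch for get (comments only, placeholder names): a single
    # awaited call. The ``expand`` value shown is illustrative; the service
    # documentation lists the supported expand expressions.
    #
    #   extension = await client.virtual_machine_scale_set_vm_extensions.get(
    #       resource_group_name="<resource-group>",
    #       vm_scale_set_name="<vmss-name>",
    #       instance_id="<instance-id>",
    #       vm_extension_name="<extension-name>",
    #       expand="instanceView",
    #   )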
@distributed_trace_async
async def list(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
expand: Optional[str] = None,
**kwargs: Any
) -> _models.VirtualMachineScaleSetVMExtensionsListResult:
"""The operation to get all extensions of an instance in Virtual Machine Scaleset.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set. Required.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine. Required.
:type instance_id: str
:param expand: The expand expression to apply on the operation. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineScaleSetVMExtensionsListResult or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2020_06_01.models.VirtualMachineScaleSetVMExtensionsListResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-06-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualMachineScaleSetVMExtensionsListResult]
request = build_list_request(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
expand=expand,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("VirtualMachineScaleSetVMExtensionsListResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualMachines/{instanceId}/extensions"} # type: ignore
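    # Hedged usage sketch for list (comments only, placeholder names): this call
    # returns the full VirtualMachineScaleSetVMExtensionsListResult in a single
    # response; the extensions themselves are expected on its ``value`` attribute.
    #
    #   result = await client.virtual_machine_scale_set_vm_extensions.list(
    #       resource_group_name="<resource-group>",
    #       vm_scale_set_name="<vmss-name>",
    #       instance_id="<instance-id>",
    #   )
    #   for extension in result.value or []:
    #       print(extension.name)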
| {
"content_hash": "a6caf029de5aec0093e4d09cc8517a1e",
"timestamp": "",
"source": "github",
"line_count": 804,
"max_line_length": 261,
"avg_line_length": 49.17412935323383,
"alnum_prop": 0.6600819506272764,
"repo_name": "Azure/azure-sdk-for-python",
"id": "9a564c859a1c77cc6c71d92ee5f89b49a4cb001a",
"size": "40036",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_06_01/aio/operations/_virtual_machine_scale_set_vm_extensions_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |