Columns:
  source_code     string, lengths 45 to 7.24k characters
  function_name   string, lengths 1 to 111 characters
  messages        list of chat turns (system / user / assistant)
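Each row pairs a Python function body (source_code) with its original name (function_name) and a chat-style rendering of the same pair in messages: a fixed system prompt, the code as the user turn, and the name as the assistant turn. The sketch below is illustrative only; build_messages and the tiny example record are hypothetical names, not part of the dataset. It shows how a row's messages field could be reconstructed from the other two columns.

SYSTEM_PROMPT = (
    "Given the source code of a python function, "
    "suggest a fitting name for the function."
)

def build_messages(source_code, function_name):
    # Mirror the role/content layout visible in every "messages" cell below.
    return [
        {"content": SYSTEM_PROMPT, "role": "system"},
        {"content": source_code, "role": "user"},
        {"content": function_name, "role": "assistant"},
    ]

# Tiny illustrative record (not taken from the dataset).
example = build_messages("def x(a):\n    return a + 1\n", "increment")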
def x(threshold_method, threshold_modifier, image, mask=None, labels=None, threshold_range_min=None, threshold_range_max=None, threshold_correction_factor=1.0, adaptive_window_size=10, **kwargs): global_threshold = get_global_threshold(threshold_method, image, mask, **kwargs) global_threshold *= threshold_correction_factor if not threshold_range_min is None: global_threshold = max(global_threshold, threshold_range_min) if not threshold_range_max is None: global_threshold = min(global_threshold, threshold_range_max) if threshold_modifier == TM_GLOBAL: local_threshold = global_threshold elif threshold_modifier == TM_ADAPTIVE: local_threshold = get_adaptive_threshold(threshold_method, image, global_threshold, mask, adaptive_window_size= adaptive_window_size, **kwargs) local_threshold = local_threshold * threshold_correction_factor elif threshold_modifier == TM_PER_OBJECT: local_threshold = get_per_object_threshold(threshold_method, image, global_threshold, mask, labels, threshold_range_min, threshold_range_max, **kwargs) local_threshold = local_threshold * threshold_correction_factor else: raise NotImplementedError('%s thresholding is not implemented' % threshold_modifier) if isinstance(local_threshold, np.ndarray): threshold_range_min = max(threshold_range_min, global_threshold * 0.7) threshold_range_max = min(threshold_range_max, global_threshold * 1.5) if not threshold_range_min is None: local_threshold[local_threshold < threshold_range_min ] = threshold_range_min if not threshold_range_max is None: local_threshold[local_threshold > threshold_range_max ] = threshold_range_max if threshold_modifier == TM_PER_OBJECT and labels is not None: local_threshold[labels == 0] = 1.0 else: if not threshold_range_min is None: local_threshold = max(local_threshold, threshold_range_min) if not threshold_range_max is None: local_threshold = min(local_threshold, threshold_range_max) return local_threshold, global_threshold
get_threshold
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(threshold_method, threshold_modifier, image, mask=None, labels=None,\n threshold_range_min=None, threshold_range_max=None,\n threshold_correction_factor=1.0, adaptive_window_size=10, **kwargs):\n global_threshold = get_global_threshold(threshold_method, image, mask,\n **kwargs)\n global_threshold *= threshold_correction_factor\n if not threshold_range_min is None:\n global_threshold = max(global_threshold, threshold_range_min)\n if not threshold_range_max is None:\n global_threshold = min(global_threshold, threshold_range_max)\n if threshold_modifier == TM_GLOBAL:\n local_threshold = global_threshold\n elif threshold_modifier == TM_ADAPTIVE:\n local_threshold = get_adaptive_threshold(threshold_method, image,\n global_threshold, mask, adaptive_window_size=\n adaptive_window_size, **kwargs)\n local_threshold = local_threshold * threshold_correction_factor\n elif threshold_modifier == TM_PER_OBJECT:\n local_threshold = get_per_object_threshold(threshold_method, image,\n global_threshold, mask, labels, threshold_range_min,\n threshold_range_max, **kwargs)\n local_threshold = local_threshold * threshold_correction_factor\n else:\n raise NotImplementedError('%s thresholding is not implemented' %\n threshold_modifier)\n if isinstance(local_threshold, np.ndarray):\n threshold_range_min = max(threshold_range_min, global_threshold * 0.7)\n threshold_range_max = min(threshold_range_max, global_threshold * 1.5)\n if not threshold_range_min is None:\n local_threshold[local_threshold < threshold_range_min\n ] = threshold_range_min\n if not threshold_range_max is None:\n local_threshold[local_threshold > threshold_range_max\n ] = threshold_range_max\n if threshold_modifier == TM_PER_OBJECT and labels is not None:\n local_threshold[labels == 0] = 1.0\n else:\n if not threshold_range_min is None:\n local_threshold = max(local_threshold, threshold_range_min)\n if not threshold_range_max is None:\n local_threshold = min(local_threshold, threshold_range_max)\n return local_threshold, global_threshold\n", "role": "user" }, { "content": "get_threshold", "role": "assistant" } ]
def x(threshold_method, image, mask=None, **kwargs): if mask is not None and not np.any(mask): return 1 if threshold_method == TM_OTSU: fn = get_otsu_threshold elif threshold_method == TM_MOG: fn = get_mog_threshold elif threshold_method == TM_BACKGROUND: fn = get_background_threshold elif threshold_method == TM_ROBUST_BACKGROUND: fn = get_robust_background_threshold elif threshold_method == TM_RIDLER_CALVARD: fn = get_ridler_calvard_threshold elif threshold_method == TM_KAPUR: fn = get_kapur_threshold elif threshold_method == TM_MCT: fn = get_maximum_correlation_threshold else: raise NotImplementedError('%s algorithm not implemented' % threshold_method) kwargs = dict([(k, v) for k, v in kwargs.items() if k in fn.args]) return fn(image, mask, **kwargs)
get_global_threshold
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(threshold_method, image, mask=None, **kwargs):\n if mask is not None and not np.any(mask):\n return 1\n if threshold_method == TM_OTSU:\n fn = get_otsu_threshold\n elif threshold_method == TM_MOG:\n fn = get_mog_threshold\n elif threshold_method == TM_BACKGROUND:\n fn = get_background_threshold\n elif threshold_method == TM_ROBUST_BACKGROUND:\n fn = get_robust_background_threshold\n elif threshold_method == TM_RIDLER_CALVARD:\n fn = get_ridler_calvard_threshold\n elif threshold_method == TM_KAPUR:\n fn = get_kapur_threshold\n elif threshold_method == TM_MCT:\n fn = get_maximum_correlation_threshold\n else:\n raise NotImplementedError('%s algorithm not implemented' %\n threshold_method)\n kwargs = dict([(k, v) for k, v in kwargs.items() if k in fn.args])\n return fn(image, mask, **kwargs)\n", "role": "user" }, { "content": "get_global_threshold", "role": "assistant" } ]
def x(threshold_method, image, threshold, mask=None, adaptive_window_size= 10, **kwargs): image_size = np.array(image.shape[:2], dtype=int) nblocks = image_size // adaptive_window_size increment = np.array(image_size, dtype=float) / np.array(nblocks, dtype =float) thresh_out = np.zeros(image_size, image.dtype) block_threshold = np.zeros([nblocks[0], nblocks[1]]) for i in range(nblocks[0]): i0 = int(i * increment[0]) i1 = int((i + 1) * increment[0]) for j in range(nblocks[1]): j0 = int(j * increment[1]) j1 = int((j + 1) * increment[1]) block = image[i0:i1, j0:j1] block_mask = None if mask is None else mask[i0:i1, j0:j1] block_threshold[i, j] = get_global_threshold(threshold_method, block, mask=block_mask, **kwargs) spline_order = min(3, np.min(nblocks) - 1) xStart = int(increment[0] / 2) xEnd = int((nblocks[0] - 0.5) * increment[0]) yStart = int(increment[1] / 2) yEnd = int((nblocks[1] - 0.5) * increment[1]) xtStart = 0.5 xtEnd = image.shape[0] - 0.5 ytStart = 0.5 ytEnd = image.shape[1] - 0.5 block_x_coords = np.linspace(xStart, xEnd, nblocks[0]) block_y_coords = np.linspace(yStart, yEnd, nblocks[1]) adaptive_interpolation = scipy.interpolate.RectBivariateSpline( block_x_coords, block_y_coords, block_threshold, bbox=(xtStart, xtEnd, ytStart, ytEnd), kx=spline_order, ky=spline_order) thresh_out_x_coords = np.linspace(0.5, int(nblocks[0] * increment[0]) - 0.5, thresh_out.shape[0]) thresh_out_y_coords = np.linspace(0.5, int(nblocks[1] * increment[1]) - 0.5, thresh_out.shape[1]) thresh_out = adaptive_interpolation(thresh_out_x_coords, thresh_out_y_coords) return thresh_out
get_adaptive_threshold
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(threshold_method, image, threshold, mask=None, adaptive_window_size=\n 10, **kwargs):\n image_size = np.array(image.shape[:2], dtype=int)\n nblocks = image_size // adaptive_window_size\n increment = np.array(image_size, dtype=float) / np.array(nblocks, dtype\n =float)\n thresh_out = np.zeros(image_size, image.dtype)\n block_threshold = np.zeros([nblocks[0], nblocks[1]])\n for i in range(nblocks[0]):\n i0 = int(i * increment[0])\n i1 = int((i + 1) * increment[0])\n for j in range(nblocks[1]):\n j0 = int(j * increment[1])\n j1 = int((j + 1) * increment[1])\n block = image[i0:i1, j0:j1]\n block_mask = None if mask is None else mask[i0:i1, j0:j1]\n block_threshold[i, j] = get_global_threshold(threshold_method,\n block, mask=block_mask, **kwargs)\n spline_order = min(3, np.min(nblocks) - 1)\n xStart = int(increment[0] / 2)\n xEnd = int((nblocks[0] - 0.5) * increment[0])\n yStart = int(increment[1] / 2)\n yEnd = int((nblocks[1] - 0.5) * increment[1])\n xtStart = 0.5\n xtEnd = image.shape[0] - 0.5\n ytStart = 0.5\n ytEnd = image.shape[1] - 0.5\n block_x_coords = np.linspace(xStart, xEnd, nblocks[0])\n block_y_coords = np.linspace(yStart, yEnd, nblocks[1])\n adaptive_interpolation = scipy.interpolate.RectBivariateSpline(\n block_x_coords, block_y_coords, block_threshold, bbox=(xtStart,\n xtEnd, ytStart, ytEnd), kx=spline_order, ky=spline_order)\n thresh_out_x_coords = np.linspace(0.5, int(nblocks[0] * increment[0]) -\n 0.5, thresh_out.shape[0])\n thresh_out_y_coords = np.linspace(0.5, int(nblocks[1] * increment[1]) -\n 0.5, thresh_out.shape[1])\n thresh_out = adaptive_interpolation(thresh_out_x_coords,\n thresh_out_y_coords)\n return thresh_out\n", "role": "user" }, { "content": "get_adaptive_threshold", "role": "assistant" } ]
def x(method, image, threshold, mask=None, labels=None, threshold_range_min =None, threshold_range_max=None, **kwargs): if labels is None: labels = np.ones(image.shape, int) if not mask is None: labels[np.logical_not(mask)] = 0 label_extents = scipy.ndimage.find_objects(labels, np.max(labels)) local_threshold = np.ones(image.shape, image.dtype) for i, extent in enumerate(label_extents, start=1): label_mask = labels[extent] == i if not mask is None: label_mask = np.logical_and(mask[extent], label_mask) values = image[extent] per_object_threshold = get_global_threshold(method, values, mask= label_mask, **kwargs) local_threshold[extent][label_mask] = per_object_threshold return local_threshold
get_per_object_threshold
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(method, image, threshold, mask=None, labels=None, threshold_range_min\n =None, threshold_range_max=None, **kwargs):\n if labels is None:\n labels = np.ones(image.shape, int)\n if not mask is None:\n labels[np.logical_not(mask)] = 0\n label_extents = scipy.ndimage.find_objects(labels, np.max(labels))\n local_threshold = np.ones(image.shape, image.dtype)\n for i, extent in enumerate(label_extents, start=1):\n label_mask = labels[extent] == i\n if not mask is None:\n label_mask = np.logical_and(mask[extent], label_mask)\n values = image[extent]\n per_object_threshold = get_global_threshold(method, values, mask=\n label_mask, **kwargs)\n local_threshold[extent][label_mask] = per_object_threshold\n return local_threshold\n", "role": "user" }, { "content": "get_per_object_threshold", "role": "assistant" } ]
def x(image, mask=None, object_fraction=0.2): cropped_image = np.array(image.flat) if mask is None else image[mask] pixel_count = np.product(cropped_image.shape) max_count = 512 ** 2 object_fraction = float(object_fraction) background_fraction = 1.0 - object_fraction if pixel_count < 3 / min(object_fraction, background_fraction): return 1 if np.max(cropped_image) == np.min(cropped_image): return cropped_image[0] number_of_classes = 3 if pixel_count > max_count: np.random.seed(0) pixel_indices = np.random.permutation(pixel_count)[:max_count] cropped_image = cropped_image[pixel_indices] cropped_image.sort() one_percent = (np.product(cropped_image.shape) + 99) // 100 cropped_image = cropped_image[one_percent:-one_percent] pixel_count = np.product(cropped_image.shape) bg_pixel = cropped_image[int(round(pixel_count * background_fraction / 2.0))] fg_pixel = cropped_image[int(round(pixel_count * (1 - object_fraction / 2)))] class_mean = np.array([bg_pixel, (bg_pixel + fg_pixel) / 2, fg_pixel]) class_std = np.ones((3,)) * 0.15 class_prob = np.array([3.0 / 4.0 * background_fraction, 1.0 / 4.0, 3.0 / 4.0 * object_fraction]) class_count = np.prod(class_mean.shape) r = np.random.RandomState() r.seed(np.frombuffer(cropped_image[:100].data, np.uint8).tolist()) for data in (r.permutation(cropped_image)[0:len(cropped_image) // 10], cropped_image): delta = 1 pixel_count = len(data) while delta > 0.001: old_class_mean = class_mean.copy() pixel_class_prob = np.ndarray((pixel_count, class_count)) for k in range(class_count): norm = scipy.stats.norm(class_mean[k], class_std[k]) pixel_class_prob[:, k] = class_prob[k] * norm.pdf(data) pixel_class_normalizer = np.sum(pixel_class_prob, 1) + 1e-12 for k in range(class_count): pixel_class_prob[:, k] = pixel_class_prob[:, k ] / pixel_class_normalizer class_prob[k] = np.mean(pixel_class_prob[:, k]) class_mean[k] = np.sum(pixel_class_prob[:, k] * data) / ( class_prob[k] * pixel_count) class_std[k] = math.sqrt(np.sum(pixel_class_prob[:, k] * ( data - class_mean[k]) ** 2) / (pixel_count * class_prob[k]) ) + 1e-06 delta = np.sum(np.abs(old_class_mean - class_mean)) ndivisions = 10000 level = np.arange(ndivisions) * ((class_mean[2] - class_mean[0]) / ndivisions) + class_mean[0] class_gaussian = np.ndarray((ndivisions, class_count)) for k in range(class_count): norm = scipy.stats.norm(class_mean[k], class_std[k]) class_gaussian[:, k] = class_prob[k] * norm.pdf(level) if abs(class_prob[1] + class_prob[2] - object_fraction) < abs( class_prob[2] - object_fraction): background_distribution = class_gaussian[:, 0] object_distribution = class_gaussian[:, 1] + class_gaussian[:, 2] else: background_distribution = class_gaussian[:, 0] + class_gaussian[:, 1] object_distribution = class_gaussian[:, 2] index = np.argmin(np.abs(background_distribution - object_distribution)) return level[index]
get_mog_threshold
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(image, mask=None, object_fraction=0.2):\n cropped_image = np.array(image.flat) if mask is None else image[mask]\n pixel_count = np.product(cropped_image.shape)\n max_count = 512 ** 2\n object_fraction = float(object_fraction)\n background_fraction = 1.0 - object_fraction\n if pixel_count < 3 / min(object_fraction, background_fraction):\n return 1\n if np.max(cropped_image) == np.min(cropped_image):\n return cropped_image[0]\n number_of_classes = 3\n if pixel_count > max_count:\n np.random.seed(0)\n pixel_indices = np.random.permutation(pixel_count)[:max_count]\n cropped_image = cropped_image[pixel_indices]\n cropped_image.sort()\n one_percent = (np.product(cropped_image.shape) + 99) // 100\n cropped_image = cropped_image[one_percent:-one_percent]\n pixel_count = np.product(cropped_image.shape)\n bg_pixel = cropped_image[int(round(pixel_count * background_fraction / \n 2.0))]\n fg_pixel = cropped_image[int(round(pixel_count * (1 - object_fraction /\n 2)))]\n class_mean = np.array([bg_pixel, (bg_pixel + fg_pixel) / 2, fg_pixel])\n class_std = np.ones((3,)) * 0.15\n class_prob = np.array([3.0 / 4.0 * background_fraction, 1.0 / 4.0, 3.0 /\n 4.0 * object_fraction])\n class_count = np.prod(class_mean.shape)\n r = np.random.RandomState()\n r.seed(np.frombuffer(cropped_image[:100].data, np.uint8).tolist())\n for data in (r.permutation(cropped_image)[0:len(cropped_image) // 10],\n cropped_image):\n delta = 1\n pixel_count = len(data)\n while delta > 0.001:\n old_class_mean = class_mean.copy()\n pixel_class_prob = np.ndarray((pixel_count, class_count))\n for k in range(class_count):\n norm = scipy.stats.norm(class_mean[k], class_std[k])\n pixel_class_prob[:, k] = class_prob[k] * norm.pdf(data)\n pixel_class_normalizer = np.sum(pixel_class_prob, 1) + 1e-12\n for k in range(class_count):\n pixel_class_prob[:, k] = pixel_class_prob[:, k\n ] / pixel_class_normalizer\n class_prob[k] = np.mean(pixel_class_prob[:, k])\n class_mean[k] = np.sum(pixel_class_prob[:, k] * data) / (\n class_prob[k] * pixel_count)\n class_std[k] = math.sqrt(np.sum(pixel_class_prob[:, k] * (\n data - class_mean[k]) ** 2) / (pixel_count * class_prob[k])\n ) + 1e-06\n delta = np.sum(np.abs(old_class_mean - class_mean))\n ndivisions = 10000\n level = np.arange(ndivisions) * ((class_mean[2] - class_mean[0]) /\n ndivisions) + class_mean[0]\n class_gaussian = np.ndarray((ndivisions, class_count))\n for k in range(class_count):\n norm = scipy.stats.norm(class_mean[k], class_std[k])\n class_gaussian[:, k] = class_prob[k] * norm.pdf(level)\n if abs(class_prob[1] + class_prob[2] - object_fraction) < abs(\n class_prob[2] - object_fraction):\n background_distribution = class_gaussian[:, 0]\n object_distribution = class_gaussian[:, 1] + class_gaussian[:, 2]\n else:\n background_distribution = class_gaussian[:, 0] + class_gaussian[:, 1]\n object_distribution = class_gaussian[:, 2]\n index = np.argmin(np.abs(background_distribution - object_distribution))\n return level[index]\n", "role": "user" }, { "content": "get_mog_threshold", "role": "assistant" } ]
def x(image, mask=None): cropped_image = np.array(image.flat) if mask is None else image[mask] if np.product(cropped_image.shape) == 0: return 0 img_min = np.min(cropped_image) img_max = np.max(cropped_image) if img_min == img_max: return cropped_image[0] robust_min = 0.02 * (img_max - img_min) + img_min robust_max = 0.98 * (img_max - img_min) + img_min nbins = 256 cropped_image = cropped_image[np.logical_and(cropped_image > robust_min, cropped_image < robust_max)] if len(cropped_image) == 0: return robust_min h = scipy.ndimage.histogram(cropped_image, robust_min, robust_max, nbins) index = np.argmax(h) cutoff = float(index) / float(nbins - 1) cutoff = (cutoff + 0.02) / 1.04 return img_min + cutoff * 2 * (img_max - img_min)
get_background_threshold
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(image, mask=None):\n cropped_image = np.array(image.flat) if mask is None else image[mask]\n if np.product(cropped_image.shape) == 0:\n return 0\n img_min = np.min(cropped_image)\n img_max = np.max(cropped_image)\n if img_min == img_max:\n return cropped_image[0]\n robust_min = 0.02 * (img_max - img_min) + img_min\n robust_max = 0.98 * (img_max - img_min) + img_min\n nbins = 256\n cropped_image = cropped_image[np.logical_and(cropped_image > robust_min,\n cropped_image < robust_max)]\n if len(cropped_image) == 0:\n return robust_min\n h = scipy.ndimage.histogram(cropped_image, robust_min, robust_max, nbins)\n index = np.argmax(h)\n cutoff = float(index) / float(nbins - 1)\n cutoff = (cutoff + 0.02) / 1.04\n return img_min + cutoff * 2 * (img_max - img_min)\n", "role": "user" }, { "content": "get_background_threshold", "role": "assistant" } ]
def x(image, mask=None, lower_outlier_fraction=0.05, upper_outlier_fraction =0.05, deviations_above_average=2.0, average_fn=np.mean, variance_fn=np.std ): cropped_image = np.array(image.flat) if mask is None else image[mask] n_pixels = np.product(cropped_image.shape) if n_pixels < 3: return 0 cropped_image.sort() if cropped_image[0] == cropped_image[-1]: return cropped_image[0] low_chop = int(round(n_pixels * lower_outlier_fraction)) hi_chop = n_pixels - int(round(n_pixels * upper_outlier_fraction)) im = cropped_image if low_chop == 0 else cropped_image[low_chop:hi_chop] mean = average_fn(im) sd = variance_fn(im) return mean + sd * deviations_above_average
get_robust_background_threshold
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(image, mask=None, lower_outlier_fraction=0.05, upper_outlier_fraction\n =0.05, deviations_above_average=2.0, average_fn=np.mean, variance_fn=np.std\n ):\n cropped_image = np.array(image.flat) if mask is None else image[mask]\n n_pixels = np.product(cropped_image.shape)\n if n_pixels < 3:\n return 0\n cropped_image.sort()\n if cropped_image[0] == cropped_image[-1]:\n return cropped_image[0]\n low_chop = int(round(n_pixels * lower_outlier_fraction))\n hi_chop = n_pixels - int(round(n_pixels * upper_outlier_fraction))\n im = cropped_image if low_chop == 0 else cropped_image[low_chop:hi_chop]\n mean = average_fn(im)\n sd = variance_fn(im)\n return mean + sd * deviations_above_average\n", "role": "user" }, { "content": "get_robust_background_threshold", "role": "assistant" } ]
def x(a): a = np.asfarray(a).flatten() return np.median(np.abs(a - np.median(a)))
mad
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(a):\n a = np.asfarray(a).flatten()\n return np.median(np.abs(a - np.median(a)))\n", "role": "user" }, { "content": "mad", "role": "assistant" } ]
def x(a): a = np.asarray(a).flatten() a_min = np.min(a) a_max = np.max(a) n_bins = np.ceil(np.sqrt(len(a))) b = ((a - a_min) / (a_max - a_min) * n_bins).astype(int) idx = np.argmax(np.bincount(b)) return np.percentile(a, 100 * float(idx + 0.5) / n_bins)
binned_mode
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(a):\n a = np.asarray(a).flatten()\n a_min = np.min(a)\n a_max = np.max(a)\n n_bins = np.ceil(np.sqrt(len(a)))\n b = ((a - a_min) / (a_max - a_min) * n_bins).astype(int)\n idx = np.argmax(np.bincount(b))\n return np.percentile(a, 100 * float(idx + 0.5) / n_bins)\n", "role": "user" }, { "content": "binned_mode", "role": "assistant" } ]
def x(image, mask=None): cropped_image = np.array(image.flat) if mask is None else image[mask] if np.product(cropped_image.shape) < 3: return 0 if np.min(cropped_image) == np.max(cropped_image): return cropped_image[0] min_val = np.max(cropped_image) / 256 cropped_image[cropped_image < min_val] = min_val im = np.log(cropped_image) min_val = np.min(im) max_val = np.max(im) im = (im - min_val) / (max_val - min_val) pre_thresh = 0 new_thresh = otsu(im) delta = 1e-05 while abs(pre_thresh - new_thresh) > delta: pre_thresh = new_thresh mean1 = np.mean(im[im < pre_thresh]) mean2 = np.mean(im[im >= pre_thresh]) new_thresh = np.mean([mean1, mean2]) return math.exp(min_val + (max_val - min_val) * new_thresh)
get_ridler_calvard_threshold
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(image, mask=None):\n cropped_image = np.array(image.flat) if mask is None else image[mask]\n if np.product(cropped_image.shape) < 3:\n return 0\n if np.min(cropped_image) == np.max(cropped_image):\n return cropped_image[0]\n min_val = np.max(cropped_image) / 256\n cropped_image[cropped_image < min_val] = min_val\n im = np.log(cropped_image)\n min_val = np.min(im)\n max_val = np.max(im)\n im = (im - min_val) / (max_val - min_val)\n pre_thresh = 0\n new_thresh = otsu(im)\n delta = 1e-05\n while abs(pre_thresh - new_thresh) > delta:\n pre_thresh = new_thresh\n mean1 = np.mean(im[im < pre_thresh])\n mean2 = np.mean(im[im >= pre_thresh])\n new_thresh = np.mean([mean1, mean2])\n return math.exp(min_val + (max_val - min_val) * new_thresh)\n", "role": "user" }, { "content": "get_ridler_calvard_threshold", "role": "assistant" } ]
def x(image, mask=None): cropped_image = np.array(image.flat) if mask is None else image[mask] if np.product(cropped_image.shape) < 3: return 0 if np.min(cropped_image) == np.max(cropped_image): return cropped_image[0] log_image = np.log2(smooth_with_noise(cropped_image, 8)) min_log_image = np.min(log_image) max_log_image = np.max(log_image) histogram = scipy.ndimage.histogram(log_image, min_log_image, max_log_image, 256) histogram_values = min_log_image + (max_log_image - min_log_image ) * np.arange(256, dtype=float) / 255 keep = histogram != 0 histogram = histogram[keep] histogram_values = histogram_values[keep] if np.product(histogram_values) == 1: return 2 ** histogram_values[0] p = histogram.astype(float) / float(np.sum(histogram)) lo_sum = np.cumsum(p) hi_sum = lo_sum[-1] - lo_sum lo_e = np.cumsum(p * np.log2(p)) hi_e = lo_e[-1] - lo_e lo_entropy = lo_e / lo_sum - np.log2(lo_sum) hi_entropy = hi_e / hi_sum - np.log2(hi_sum) sum_entropy = lo_entropy[:-1] + hi_entropy[:-1] sum_entropy[np.logical_not(np.isfinite(sum_entropy))] = np.Inf entry = np.argmin(sum_entropy) return 2 ** ((histogram_values[entry] + histogram_values[entry + 1]) / 2)
get_kapur_threshold
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(image, mask=None):\n cropped_image = np.array(image.flat) if mask is None else image[mask]\n if np.product(cropped_image.shape) < 3:\n return 0\n if np.min(cropped_image) == np.max(cropped_image):\n return cropped_image[0]\n log_image = np.log2(smooth_with_noise(cropped_image, 8))\n min_log_image = np.min(log_image)\n max_log_image = np.max(log_image)\n histogram = scipy.ndimage.histogram(log_image, min_log_image,\n max_log_image, 256)\n histogram_values = min_log_image + (max_log_image - min_log_image\n ) * np.arange(256, dtype=float) / 255\n keep = histogram != 0\n histogram = histogram[keep]\n histogram_values = histogram_values[keep]\n if np.product(histogram_values) == 1:\n return 2 ** histogram_values[0]\n p = histogram.astype(float) / float(np.sum(histogram))\n lo_sum = np.cumsum(p)\n hi_sum = lo_sum[-1] - lo_sum\n lo_e = np.cumsum(p * np.log2(p))\n hi_e = lo_e[-1] - lo_e\n lo_entropy = lo_e / lo_sum - np.log2(lo_sum)\n hi_entropy = hi_e / hi_sum - np.log2(hi_sum)\n sum_entropy = lo_entropy[:-1] + hi_entropy[:-1]\n sum_entropy[np.logical_not(np.isfinite(sum_entropy))] = np.Inf\n entry = np.argmin(sum_entropy)\n return 2 ** ((histogram_values[entry] + histogram_values[entry + 1]) / 2)\n", "role": "user" }, { "content": "get_kapur_threshold", "role": "assistant" } ]
def x(image, mask=None, bins=256): if mask is not None: image = image[mask] image = image.ravel() nm = len(image) if nm == 0: return 0 min_value = np.min(image) max_value = np.max(image) if min_value == max_value: return min_value image = ((image - min_value) * (bins - 1) / (max_value - min_value) ).astype(int) histogram = np.bincount(image) mean_value = np.mean(image) diff = np.arange(len(histogram)) - mean_value diff2 = diff * diff ndiff = histogram * diff ndiff2 = histogram * diff2 sndiff2 = np.sum(ndiff2) cndiff = np.cumsum(ndiff) numerator = np.hstack([[cndiff[-1]], cndiff[-1] - cndiff[:-1]]) ni = nm - np.hstack([[0], np.cumsum(histogram[:-1])]) denominator = np.sqrt(sndiff2 * (nm - ni) * ni / nm) mct = numerator / denominator mct[denominator == 0] = 0 my_bin = np.argmax(mct) - 1 return min_value + my_bin * (max_value - min_value) / (bins - 1)
get_maximum_correlation_threshold
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(image, mask=None, bins=256):\n if mask is not None:\n image = image[mask]\n image = image.ravel()\n nm = len(image)\n if nm == 0:\n return 0\n min_value = np.min(image)\n max_value = np.max(image)\n if min_value == max_value:\n return min_value\n image = ((image - min_value) * (bins - 1) / (max_value - min_value)\n ).astype(int)\n histogram = np.bincount(image)\n mean_value = np.mean(image)\n diff = np.arange(len(histogram)) - mean_value\n diff2 = diff * diff\n ndiff = histogram * diff\n ndiff2 = histogram * diff2\n sndiff2 = np.sum(ndiff2)\n cndiff = np.cumsum(ndiff)\n numerator = np.hstack([[cndiff[-1]], cndiff[-1] - cndiff[:-1]])\n ni = nm - np.hstack([[0], np.cumsum(histogram[:-1])])\n denominator = np.sqrt(sndiff2 * (nm - ni) * ni / nm)\n mct = numerator / denominator\n mct[denominator == 0] = 0\n my_bin = np.argmax(mct) - 1\n return min_value + my_bin * (max_value - min_value) / (bins - 1)\n", "role": "user" }, { "content": "get_maximum_correlation_threshold", "role": "assistant" } ]
def x(image, mask, binary_image): if not np.any(mask): return 0 minval = np.max(image[mask]) / 256 if minval == 0: return 0 fg = np.log2(np.maximum(image[binary_image & mask], minval)) bg = np.log2(np.maximum(image[~binary_image & mask], minval)) nfg = np.product(fg.shape) nbg = np.product(bg.shape) if nfg == 0: return np.var(bg) elif nbg == 0: return np.var(fg) else: return (np.var(fg) * nfg + np.var(bg) * nbg) / (nfg + nbg)
weighted_variance
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(image, mask, binary_image):\n if not np.any(mask):\n return 0\n minval = np.max(image[mask]) / 256\n if minval == 0:\n return 0\n fg = np.log2(np.maximum(image[binary_image & mask], minval))\n bg = np.log2(np.maximum(image[~binary_image & mask], minval))\n nfg = np.product(fg.shape)\n nbg = np.product(bg.shape)\n if nfg == 0:\n return np.var(bg)\n elif nbg == 0:\n return np.var(fg)\n else:\n return (np.var(fg) * nfg + np.var(bg) * nbg) / (nfg + nbg)\n", "role": "user" }, { "content": "weighted_variance", "role": "assistant" } ]
def x(image, mask, binary_image): mask = mask.copy() mask[np.isnan(image)] = False if not np.any(mask): return 0 minval = np.max(image[mask]) / 256 if minval == 0: return 0 clamped_image = image.copy() clamped_image[clamped_image < minval] = minval image = smooth_with_noise(clamped_image, 8) im_min = np.min(image) im_max = np.max(image) upper = np.log2(im_max) lower = np.log2(im_min) if upper == lower: return math.log(np.sum(mask), 2) fg = image[binary_image & mask] bg = image[~binary_image & mask] if len(fg) == 0 or len(bg) == 0: return 0 log_fg = np.log2(fg) log_bg = np.log2(bg) hfg = numpy_histogram(log_fg, 256, range=(lower, upper))[0] hbg = numpy_histogram(log_bg, 256, range=(lower, upper))[0] hfg = hfg[hfg > 0] hbg = hbg[hbg > 0] if np.product(hfg.shape) == 0: hfg = np.ones((1,), int) if np.product(hbg.shape) == 0: hbg = np.ones((1,), int) hfg = hfg.astype(float) / float(np.sum(hfg)) hbg = hbg.astype(float) / float(np.sum(hbg)) return np.sum(hfg * np.log2(hfg)) + np.sum(hbg * np.log2(hbg))
sum_of_entropies
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(image, mask, binary_image):\n mask = mask.copy()\n mask[np.isnan(image)] = False\n if not np.any(mask):\n return 0\n minval = np.max(image[mask]) / 256\n if minval == 0:\n return 0\n clamped_image = image.copy()\n clamped_image[clamped_image < minval] = minval\n image = smooth_with_noise(clamped_image, 8)\n im_min = np.min(image)\n im_max = np.max(image)\n upper = np.log2(im_max)\n lower = np.log2(im_min)\n if upper == lower:\n return math.log(np.sum(mask), 2)\n fg = image[binary_image & mask]\n bg = image[~binary_image & mask]\n if len(fg) == 0 or len(bg) == 0:\n return 0\n log_fg = np.log2(fg)\n log_bg = np.log2(bg)\n hfg = numpy_histogram(log_fg, 256, range=(lower, upper))[0]\n hbg = numpy_histogram(log_bg, 256, range=(lower, upper))[0]\n hfg = hfg[hfg > 0]\n hbg = hbg[hbg > 0]\n if np.product(hfg.shape) == 0:\n hfg = np.ones((1,), int)\n if np.product(hbg.shape) == 0:\n hbg = np.ones((1,), int)\n hfg = hfg.astype(float) / float(np.sum(hfg))\n hbg = hbg.astype(float) / float(np.sum(hbg))\n return np.sum(hfg * np.log2(hfg)) + np.sum(hbg * np.log2(hbg))\n", "role": "user" }, { "content": "sum_of_entropies", "role": "assistant" } ]
def x(image): orig_min, orig_max = scipy.ndimage.extrema(image)[:2] limage = image.copy() noise_min = orig_min + (orig_max - orig_min) / 256.0 + np.finfo(image.dtype ).eps limage[limage < noise_min] = noise_min d = {'noise_min': noise_min} limage = np.log(limage) log_min, log_max = scipy.ndimage.extrema(limage)[:2] d['log_min'] = log_min d['log_max'] = log_max return stretch(limage), d
log_transform
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(image):\n orig_min, orig_max = scipy.ndimage.extrema(image)[:2]\n limage = image.copy()\n noise_min = orig_min + (orig_max - orig_min) / 256.0 + np.finfo(image.dtype\n ).eps\n limage[limage < noise_min] = noise_min\n d = {'noise_min': noise_min}\n limage = np.log(limage)\n log_min, log_max = scipy.ndimage.extrema(limage)[:2]\n d['log_min'] = log_min\n d['log_max'] = log_max\n return stretch(limage), d\n", "role": "user" }, { "content": "log_transform", "role": "assistant" } ]
def x(a, bins=10, range=None, normed=False, weights=None): args = inspect.getargs(np.histogram.__code__)[0] if args[-1] == 'new': return np.histogram(a, bins, range, normed, weights, new=True) return np.histogram(a, bins, range, normed, weights)
numpy_histogram
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(a, bins=10, range=None, normed=False, weights=None):\n args = inspect.getargs(np.histogram.__code__)[0]\n if args[-1] == 'new':\n return np.histogram(a, bins, range, normed, weights, new=True)\n return np.histogram(a, bins, range, normed, weights)\n", "role": "user" }, { "content": "numpy_histogram", "role": "assistant" } ]
def x(image, nbins=None): flat_image = image.ravel() sort_order = flat_image.argsort().astype(np.uint32) flat_image = flat_image[sort_order] sort_rank = np.zeros_like(sort_order) is_different = flat_image[:-1] != flat_image[1:] np.cumsum(is_different, out=sort_rank[1:]) original_values = np.zeros((sort_rank[-1] + 1,), image.dtype) original_values[0] = flat_image[0] original_values[1:] = flat_image[1:][is_different] int_image = np.zeros_like(sort_order) int_image[sort_order] = sort_rank if nbins is not None: max_ranked_data = np.max(int_image) while max_ranked_data >= nbins: hist = np.bincount(int_image) order = np.argsort(hist) candidates = order[:max_ranked_data + 2 - nbins] to_delete = np.zeros(max_ranked_data + 2, bool) to_delete[candidates] = True td_mask = to_delete[:-1] & ((np.arange(max_ranked_data + 1) & 2 == 0) | ~to_delete[1:]) if td_mask[0]: td_mask[0] = False rd_translation = np.cumsum(~td_mask) - 1 int_image = rd_translation[int_image] original_values = original_values[~td_mask] max_ranked_data = len(original_values) - 1 return int_image.reshape(image.shape), original_values
rank_order
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(image, nbins=None):\n flat_image = image.ravel()\n sort_order = flat_image.argsort().astype(np.uint32)\n flat_image = flat_image[sort_order]\n sort_rank = np.zeros_like(sort_order)\n is_different = flat_image[:-1] != flat_image[1:]\n np.cumsum(is_different, out=sort_rank[1:])\n original_values = np.zeros((sort_rank[-1] + 1,), image.dtype)\n original_values[0] = flat_image[0]\n original_values[1:] = flat_image[1:][is_different]\n int_image = np.zeros_like(sort_order)\n int_image[sort_order] = sort_rank\n if nbins is not None:\n max_ranked_data = np.max(int_image)\n while max_ranked_data >= nbins:\n hist = np.bincount(int_image)\n order = np.argsort(hist)\n candidates = order[:max_ranked_data + 2 - nbins]\n to_delete = np.zeros(max_ranked_data + 2, bool)\n to_delete[candidates] = True\n td_mask = to_delete[:-1] & ((np.arange(max_ranked_data + 1) & 2 ==\n 0) | ~to_delete[1:])\n if td_mask[0]:\n td_mask[0] = False\n rd_translation = np.cumsum(~td_mask) - 1\n int_image = rd_translation[int_image]\n original_values = original_values[~td_mask]\n max_ranked_data = len(original_values) - 1\n return int_image.reshape(image.shape), original_values\n", "role": "user" }, { "content": "rank_order", "role": "assistant" } ]
def x(image, labels): nobjects = labels.max() objects = np.arange(nobjects + 1) lmin, lmax = scind.extrema(image, labels, objects)[:2] divisor = np.ones((nobjects + 1,)) divisor[lmax > lmin] = (lmax - lmin)[lmax > lmin] return (image - lmin[labels]) / divisor[labels]
normalized_per_object
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(image, labels):\n nobjects = labels.max()\n objects = np.arange(nobjects + 1)\n lmin, lmax = scind.extrema(image, labels, objects)[:2]\n divisor = np.ones((nobjects + 1,))\n divisor[lmax > lmin] = (lmax - lmin)[lmax > lmin]\n return (image - lmin[labels]) / divisor[labels]\n", "role": "user" }, { "content": "normalized_per_object", "role": "assistant" } ]
def x(image, nlevels): tmp = np.array(image // (1.0 / nlevels), dtype='i1') return tmp.clip(0, nlevels - 1)
quantize
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(image, nlevels):\n tmp = np.array(image // (1.0 / nlevels), dtype='i1')\n return tmp.clip(0, nlevels - 1)\n", "role": "user" }, { "content": "quantize", "role": "assistant" } ]
def x(quantized_image, labels, scale_i=3, scale_j=0): labels = labels.astype(int) nlevels = quantized_image.max() + 1 nobjects = labels.max() if scale_i < 0: scale_i = -scale_i scale_j = -scale_j if scale_i == 0 and scale_j > 0: image_a = quantized_image[:, :-scale_j] image_b = quantized_image[:, scale_j:] labels_ab = labels_a = labels[:, :-scale_j] labels_b = labels[:, scale_j:] elif scale_i > 0 and scale_j == 0: image_a = quantized_image[:-scale_i, :] image_b = quantized_image[scale_i:, :] labels_ab = labels_a = labels[:-scale_i, :] labels_b = labels[scale_i:, :] elif scale_i > 0 and scale_j > 0: image_a = quantized_image[:-scale_i, :-scale_j] image_b = quantized_image[scale_i:, scale_j:] labels_ab = labels_a = labels[:-scale_i, :-scale_j] labels_b = labels[scale_i:, scale_j:] else: image_a = quantized_image[:-scale_i, -scale_j:] image_b = quantized_image[scale_i:, :scale_j] labels_ab = labels_a = labels[:-scale_i, -scale_j:] labels_b = labels[scale_i:, :scale_j] equilabel = (labels_a == labels_b) & (labels_a > 0) if np.any(equilabel): Q = nlevels * nlevels * (labels_ab[equilabel] - 1) + nlevels * image_a[ equilabel] + image_b[equilabel] R = np.bincount(Q) if R.size != nobjects * nlevels * nlevels: S = np.zeros(nobjects * nlevels * nlevels - R.size) R = np.hstack((R, S)) P = R.reshape(nobjects, nlevels, nlevels) pixel_count = fix(scind.sum(equilabel, labels_ab, np.arange( nobjects, dtype=np.int32) + 1)) pixel_count = np.tile(pixel_count[:, np.newaxis, np.newaxis], (1, nlevels, nlevels)) return P.astype(float) / pixel_count.astype(float), nlevels else: return np.zeros((nobjects, nlevels, nlevels)), nlevels
cooccurrence
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(quantized_image, labels, scale_i=3, scale_j=0):\n labels = labels.astype(int)\n nlevels = quantized_image.max() + 1\n nobjects = labels.max()\n if scale_i < 0:\n scale_i = -scale_i\n scale_j = -scale_j\n if scale_i == 0 and scale_j > 0:\n image_a = quantized_image[:, :-scale_j]\n image_b = quantized_image[:, scale_j:]\n labels_ab = labels_a = labels[:, :-scale_j]\n labels_b = labels[:, scale_j:]\n elif scale_i > 0 and scale_j == 0:\n image_a = quantized_image[:-scale_i, :]\n image_b = quantized_image[scale_i:, :]\n labels_ab = labels_a = labels[:-scale_i, :]\n labels_b = labels[scale_i:, :]\n elif scale_i > 0 and scale_j > 0:\n image_a = quantized_image[:-scale_i, :-scale_j]\n image_b = quantized_image[scale_i:, scale_j:]\n labels_ab = labels_a = labels[:-scale_i, :-scale_j]\n labels_b = labels[scale_i:, scale_j:]\n else:\n image_a = quantized_image[:-scale_i, -scale_j:]\n image_b = quantized_image[scale_i:, :scale_j]\n labels_ab = labels_a = labels[:-scale_i, -scale_j:]\n labels_b = labels[scale_i:, :scale_j]\n equilabel = (labels_a == labels_b) & (labels_a > 0)\n if np.any(equilabel):\n Q = nlevels * nlevels * (labels_ab[equilabel] - 1) + nlevels * image_a[\n equilabel] + image_b[equilabel]\n R = np.bincount(Q)\n if R.size != nobjects * nlevels * nlevels:\n S = np.zeros(nobjects * nlevels * nlevels - R.size)\n R = np.hstack((R, S))\n P = R.reshape(nobjects, nlevels, nlevels)\n pixel_count = fix(scind.sum(equilabel, labels_ab, np.arange(\n nobjects, dtype=np.int32) + 1))\n pixel_count = np.tile(pixel_count[:, np.newaxis, np.newaxis], (1,\n nlevels, nlevels))\n return P.astype(float) / pixel_count.astype(float), nlevels\n else:\n return np.zeros((nobjects, nlevels, nlevels)), nlevels\n", "role": "user" }, { "content": "cooccurrence", "role": "assistant" } ]
def x(self): multiplied = np.dot(self.levels[:, np.newaxis] + 1, self.levels[np. newaxis] + 1) repeated = np.tile(multiplied[np.newaxis], (self.nobjects, 1, 1)) summed = (repeated * self.P).sum(2).sum(1) h3 = (summed - self.mux * self.muy) / (self.sigmax * self.sigmay) h3[np.isinf(h3)] = 0 return h3
H3
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n multiplied = np.dot(self.levels[:, np.newaxis] + 1, self.levels[np.\n newaxis] + 1)\n repeated = np.tile(multiplied[np.newaxis], (self.nobjects, 1, 1))\n summed = (repeated * self.P).sum(2).sum(1)\n h3 = (summed - self.mux * self.muy) / (self.sigmax * self.sigmay)\n h3[np.isinf(h3)] = 0\n return h3\n", "role": "user" }, { "content": "H3", "role": "assistant" } ]
def x(self): t = 1 + toeplitz(self.levels) ** 2 repeated = np.tile(t[np.newaxis], (self.nobjects, 1, 1)) return (1.0 / repeated * self.P).sum(2).sum(1)
H5
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n t = 1 + toeplitz(self.levels) ** 2\n repeated = np.tile(t[np.newaxis], (self.nobjects, 1, 1))\n return (1.0 / repeated * self.P).sum(2).sum(1)\n", "role": "user" }, { "content": "H5", "role": "assistant" } ]
def x(self): if not hasattr(self, '_H6'): self._H6 = ((self.rlevels2 + 2) * self.p_xplusy).sum(1) return self._H6
H6
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if not hasattr(self, '_H6'):\n self._H6 = ((self.rlevels2 + 2) * self.p_xplusy).sum(1)\n return self._H6\n", "role": "user" }, { "content": "H6", "role": "assistant" } ]
def x(self): h6 = np.tile(self.H6(), (self.rlevels2.shape[1], 1)).transpose() return ((self.rlevels2 + 2 - h6) ** 2 * self.p_xplusy).sum(1)
H7
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n h6 = np.tile(self.H6(), (self.rlevels2.shape[1], 1)).transpose()\n return ((self.rlevels2 + 2 - h6) ** 2 * self.p_xplusy).sum(1)\n", "role": "user" }, { "content": "H7", "role": "assistant" } ]
def x(self): return -(self.p_xplusy * np.log(self.p_xplusy + self.eps)).sum(1)
H8
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n return -(self.p_xplusy * np.log(self.p_xplusy + self.eps)).sum(1)\n", "role": "user" }, { "content": "H8", "role": "assistant" } ]
def x(self): if not hasattr(self, '_H9'): self._H9 = -(self.P * np.log(self.P + self.eps)).sum(2).sum(1) return self._H9
H9
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if not hasattr(self, '_H9'):\n self._H9 = -(self.P * np.log(self.P + self.eps)).sum(2).sum(1)\n return self._H9\n", "role": "user" }, { "content": "H9", "role": "assistant" } ]
def x(self): c = (self.rlevels * self.p_xminusy).sum(1) c1 = np.tile(c, (self.nlevels, 1)).transpose() e = self.rlevels - c1 return (self.p_xminusy * e ** 2).sum(1)
H10
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n c = (self.rlevels * self.p_xminusy).sum(1)\n c1 = np.tile(c, (self.nlevels, 1)).transpose()\n e = self.rlevels - c1\n return (self.p_xminusy * e ** 2).sum(1)\n", "role": "user" }, { "content": "H10", "role": "assistant" } ]
def x(self): return -(self.p_xminusy * np.log(self.p_xminusy + self.eps)).sum(1)
H11
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n return -(self.p_xminusy * np.log(self.p_xminusy + self.eps)).sum(1)\n", "role": "user" }, { "content": "H11", "role": "assistant" } ]
def x(self): maxima = np.vstack((self.hx, self.hy)).max(0) return (self.H9() - self.hxy1) / maxima
H12
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n maxima = np.vstack((self.hx, self.hy)).max(0)\n return (self.H9() - self.hxy1) / maxima\n", "role": "user" }, { "content": "H12", "role": "assistant" } ]
def x(self): return np.sqrt(1 - np.exp(-2 * (self.hxy2 - self.H9())))
H13
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n return np.sqrt(1 - np.exp(-2 * (self.hxy2 - self.H9())))\n", "role": "user" }, { "content": "H13", "role": "assistant" } ]
def x(zernike_indexes): n_max = np.max(zernike_indexes[:, 0]) factorial = np.ones((1 + n_max,), dtype=float) factorial[1:] = np.cumproduct(np.arange(1, 1 + n_max, dtype=float)) width = int(n_max // 2 + 1) lut = np.zeros((zernike_indexes.shape[0], width), dtype=float) for idx, (n, m) in enumerate(zernike_indexes): alt = 1 npmh = (n + m) // 2 nmmh = (n - m) // 2 for k in range(0, nmmh + 1): lut[idx, k] = alt * factorial[n - k] / (factorial[k] * factorial[npmh - k] * factorial[nmmh - k]) alt = -alt return lut
construct_zernike_lookuptable
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(zernike_indexes):\n n_max = np.max(zernike_indexes[:, 0])\n factorial = np.ones((1 + n_max,), dtype=float)\n factorial[1:] = np.cumproduct(np.arange(1, 1 + n_max, dtype=float))\n width = int(n_max // 2 + 1)\n lut = np.zeros((zernike_indexes.shape[0], width), dtype=float)\n for idx, (n, m) in enumerate(zernike_indexes):\n alt = 1\n npmh = (n + m) // 2\n nmmh = (n - m) // 2\n for k in range(0, nmmh + 1):\n lut[idx, k] = alt * factorial[n - k] / (factorial[k] *\n factorial[npmh - k] * factorial[nmmh - k])\n alt = -alt\n return lut\n", "role": "user" }, { "content": "construct_zernike_lookuptable", "role": "assistant" } ]
def x(x, y, zernike_indexes, mask=None, weight=None): if x.shape != y.shape: raise ValueError('X and Y must have the same shape') if mask is None: pass elif mask.shape != x.shape: raise ValueError('The mask must have the same shape as X and Y') else: x = x[mask] y = y[mask] if weight is not None: weight = weight[mask] lut = construct_zernike_lookuptable(zernike_indexes) nzernikes = zernike_indexes.shape[0] r_square = np.square(x) np.add(r_square, np.square(y), out=r_square) z = np.empty(x.shape, np.complex) np.copyto(z.real, y) np.copyto(z.imag, x) s = np.empty_like(x) zf = np.zeros((nzernikes,) + x.shape, np.complex) z_pows = {} for idx, (n, m) in enumerate(zernike_indexes): s[:] = 0 if not m in z_pows: if m == 0: z_pows[m] = np.complex(1.0) else: z_pows[m] = z if m == 1 else z ** m z_pow = z_pows[m] for k in range((n - m) // 2 + 1): s *= r_square s += lut[idx, k] s[r_square > 1] = 0 if weight is not None: s *= weight.astype(s.dtype) if m == 0: np.copyto(zf[idx], s) else: np.multiply(s, z_pow, out=zf[idx]) if mask is None: result = zf.transpose(tuple(range(1, 1 + x.ndim)) + (0,)) else: result = np.zeros(mask.shape + (nzernikes,), np.complex) result[mask] = zf.transpose(tuple(range(1, 1 + x.ndim)) + (0,)) return result
construct_zernike_polynomials
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(x, y, zernike_indexes, mask=None, weight=None):\n if x.shape != y.shape:\n raise ValueError('X and Y must have the same shape')\n if mask is None:\n pass\n elif mask.shape != x.shape:\n raise ValueError('The mask must have the same shape as X and Y')\n else:\n x = x[mask]\n y = y[mask]\n if weight is not None:\n weight = weight[mask]\n lut = construct_zernike_lookuptable(zernike_indexes)\n nzernikes = zernike_indexes.shape[0]\n r_square = np.square(x)\n np.add(r_square, np.square(y), out=r_square)\n z = np.empty(x.shape, np.complex)\n np.copyto(z.real, y)\n np.copyto(z.imag, x)\n s = np.empty_like(x)\n zf = np.zeros((nzernikes,) + x.shape, np.complex)\n z_pows = {}\n for idx, (n, m) in enumerate(zernike_indexes):\n s[:] = 0\n if not m in z_pows:\n if m == 0:\n z_pows[m] = np.complex(1.0)\n else:\n z_pows[m] = z if m == 1 else z ** m\n z_pow = z_pows[m]\n for k in range((n - m) // 2 + 1):\n s *= r_square\n s += lut[idx, k]\n s[r_square > 1] = 0\n if weight is not None:\n s *= weight.astype(s.dtype)\n if m == 0:\n np.copyto(zf[idx], s)\n else:\n np.multiply(s, z_pow, out=zf[idx])\n if mask is None:\n result = zf.transpose(tuple(range(1, 1 + x.ndim)) + (0,))\n else:\n result = np.zeros(mask.shape + (nzernikes,), np.complex)\n result[mask] = zf.transpose(tuple(range(1, 1 + x.ndim)) + (0,))\n return result\n", "role": "user" }, { "content": "construct_zernike_polynomials", "role": "assistant" } ]
def x(zf, radii, labels, indexes=None): if indexes is None: indexes = np.arange(1, np.max(labels) + 1, dtype=np.int32) else: indexes = np.array(indexes, dtype=np.int32) radii = np.asarray(radii, dtype=float) n = radii.size k = zf.shape[2] score = np.zeros((n, k)) if n == 0: return score areas = np.square(radii) areas *= np.pi for ki in range(k): zfk = zf[:, :, ki] real_score = scipy.ndimage.sum(zfk.real, labels, indexes) real_score = fixup_scipy_ndimage_result(real_score) imag_score = scipy.ndimage.sum(zfk.imag, labels, indexes) imag_score = fixup_scipy_ndimage_result(imag_score) np.square(real_score, out=real_score) np.square(imag_score, out=imag_score) one_score = real_score + imag_score np.sqrt(one_score, out=one_score) one_score /= areas score[:, ki] = one_score return score
score_zernike
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(zf, radii, labels, indexes=None):\n if indexes is None:\n indexes = np.arange(1, np.max(labels) + 1, dtype=np.int32)\n else:\n indexes = np.array(indexes, dtype=np.int32)\n radii = np.asarray(radii, dtype=float)\n n = radii.size\n k = zf.shape[2]\n score = np.zeros((n, k))\n if n == 0:\n return score\n areas = np.square(radii)\n areas *= np.pi\n for ki in range(k):\n zfk = zf[:, :, ki]\n real_score = scipy.ndimage.sum(zfk.real, labels, indexes)\n real_score = fixup_scipy_ndimage_result(real_score)\n imag_score = scipy.ndimage.sum(zfk.imag, labels, indexes)\n imag_score = fixup_scipy_ndimage_result(imag_score)\n np.square(real_score, out=real_score)\n np.square(imag_score, out=imag_score)\n one_score = real_score + imag_score\n np.sqrt(one_score, out=one_score)\n one_score /= areas\n score[:, ki] = one_score\n return score\n", "role": "user" }, { "content": "score_zernike", "role": "assistant" } ]
def x(zernike_indexes, labels, indexes): indexes = np.array(indexes, dtype=np.int32) nindexes = len(indexes) reverse_indexes = np.empty((np.max(indexes) + 1,), int) reverse_indexes.fill(-1) reverse_indexes[indexes] = np.arange(indexes.shape[0], dtype=int) mask = reverse_indexes[labels] != -1 centers, radii = minimum_enclosing_circle(labels, indexes) ny, nx = labels.shape[0:2] y, x = np.asarray(np.mgrid[0:ny - 1:complex(0, ny), 0:nx - 1:complex(0, nx)], dtype=float) xm = x[mask] ym = y[mask] lm = labels[mask] rev_ind = reverse_indexes[lm] ym -= centers[rev_ind, 0] ym /= radii[rev_ind] xm -= centers[rev_ind, 1] xm /= radii[rev_ind] x = np.zeros_like(x) x[mask] = xm y = np.zeros_like(y) y[mask] = ym score = np.zeros((nindexes, len(zernike_indexes))) zf = construct_zernike_polynomials(x, y, zernike_indexes, mask) score = score_zernike(zf, radii, labels, indexes) return score
zernike
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(zernike_indexes, labels, indexes):\n indexes = np.array(indexes, dtype=np.int32)\n nindexes = len(indexes)\n reverse_indexes = np.empty((np.max(indexes) + 1,), int)\n reverse_indexes.fill(-1)\n reverse_indexes[indexes] = np.arange(indexes.shape[0], dtype=int)\n mask = reverse_indexes[labels] != -1\n centers, radii = minimum_enclosing_circle(labels, indexes)\n ny, nx = labels.shape[0:2]\n y, x = np.asarray(np.mgrid[0:ny - 1:complex(0, ny), 0:nx - 1:complex(0,\n nx)], dtype=float)\n xm = x[mask]\n ym = y[mask]\n lm = labels[mask]\n rev_ind = reverse_indexes[lm]\n ym -= centers[rev_ind, 0]\n ym /= radii[rev_ind]\n xm -= centers[rev_ind, 1]\n xm /= radii[rev_ind]\n x = np.zeros_like(x)\n x[mask] = xm\n y = np.zeros_like(y)\n y[mask] = ym\n score = np.zeros((nindexes, len(zernike_indexes)))\n zf = construct_zernike_polynomials(x, y, zernike_indexes, mask)\n score = score_zernike(zf, radii, labels, indexes)\n return score\n", "role": "user" }, { "content": "zernike", "role": "assistant" } ]
def x(limit=10): def x(n_max): for n in range(0, n_max): for m in range(n % 2, n + 1, 2): yield n yield m z_ind = np.fromiter(zernike_indexes_iter(limit), np.intc) z_ind = z_ind.reshape((len(z_ind) // 2, 2)) return z_ind
get_zernike_indexes
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(limit=10):\n\n def x(n_max):\n for n in range(0, n_max):\n for m in range(n % 2, n + 1, 2):\n yield n\n yield m\n z_ind = np.fromiter(zernike_indexes_iter(limit), np.intc)\n z_ind = z_ind.reshape((len(z_ind) // 2, 2))\n return z_ind\n", "role": "user" }, { "content": "get_zernike_indexes", "role": "assistant" } ]
def x(image, labels, mask, weight): if image.shape != labels.shape: raise ValueError('Image shape %s != label shape %s' % (repr(image. shape), repr(labels.shape))) if image.shape != mask.shape: raise ValueError('Image shape %s != mask shape %s' % (repr(image. shape), repr(mask.shape))) labels_out = np.zeros(labels.shape, np.int32) distances = -np.ones(labels.shape, np.float64) distances[labels > 0] = 0 labels_and_mask = np.logical_and(labels != 0, mask) coords = np.argwhere(labels_and_mask) i1, i2 = _propagate.convert_to_ints(0.0) ncoords = coords.shape[0] pq = np.column_stack((np.ones((ncoords,), int) * i1, np.ones((ncoords,), int) * i2, labels[labels_and_mask], coords)) _propagate.propagate(np.ascontiguousarray(image, np.float64), np. ascontiguousarray(pq, np.int32), np.ascontiguousarray(mask, np.int8 ), labels_out, distances, float(weight)) labels_out[labels > 0] = labels[labels > 0] return labels_out, distances
propagate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(image, labels, mask, weight):\n if image.shape != labels.shape:\n raise ValueError('Image shape %s != label shape %s' % (repr(image.\n shape), repr(labels.shape)))\n if image.shape != mask.shape:\n raise ValueError('Image shape %s != mask shape %s' % (repr(image.\n shape), repr(mask.shape)))\n labels_out = np.zeros(labels.shape, np.int32)\n distances = -np.ones(labels.shape, np.float64)\n distances[labels > 0] = 0\n labels_and_mask = np.logical_and(labels != 0, mask)\n coords = np.argwhere(labels_and_mask)\n i1, i2 = _propagate.convert_to_ints(0.0)\n ncoords = coords.shape[0]\n pq = np.column_stack((np.ones((ncoords,), int) * i1, np.ones((ncoords,),\n int) * i2, labels[labels_and_mask], coords))\n _propagate.propagate(np.ascontiguousarray(image, np.float64), np.\n ascontiguousarray(pq, np.int32), np.ascontiguousarray(mask, np.int8\n ), labels_out, distances, float(weight))\n labels_out[labels > 0] = labels[labels > 0]\n return labels_out, distances\n", "role": "user" }, { "content": "propagate", "role": "assistant" } ]
def x(i, j, costs, wants_dual_variables=False, augmenting_row_reductions=2): import os i = np.atleast_1d(i).astype(int) j = np.atleast_1d(j).astype(int) costs = np.atleast_1d(costs) assert len(i) == len(j), 'i and j must be the same length' assert len(i) == len(costs), 'costs must be the same length as i' j_count = np.bincount(j) assert not np.any(j_count == 0), 'all j must be paired with at least one i' j_index = np.hstack([[0], np.cumsum(j_count[:-1])]) i_count = np.bincount(i) assert not np.any(i_count == 0), 'all i must be paired with at least one j' i_index = np.hstack([[0], np.cumsum(i_count[:-1])]) n = len(j_count) assert n == len(i_count), 'There must be the same number of unique i and j' x = np.ascontiguousarray(np.ones(n, np.uint32) * n) y = np.ascontiguousarray(np.ones(n, np.uint32) * n, np.uint32) u = np.ascontiguousarray(np.zeros(n, np.float64)) order = np.lexsort((-i, costs, j)) min_idx = order[j_index] min_i = i[min_idx] v = np.ascontiguousarray(costs[min_idx], np.float64) x[min_i] = np.arange(n).astype(np.uint32) y[x[x != n]] = np.arange(n).astype(np.uint32)[x != n] assignment_count = np.bincount(min_i[min_i != n]) assignment_count = np.hstack((assignment_count, np.zeros(n - len( assignment_count), int))) free_i = assignment_count == 0 one_i = assignment_count == 1 order = np.lexsort((j, i)) j = np.ascontiguousarray(j[order], np.uint32) costs = np.ascontiguousarray(costs[order], np.float64) i_index = np.ascontiguousarray(i_index, np.uint32) i_count = np.ascontiguousarray(i_count, np.uint32) if np.any(one_i): reduction_transfer(np.ascontiguousarray(np.argwhere(one_i).flatten( ), np.uint32), j, i_index, i_count, x, u, v, costs) ii = np.ascontiguousarray(np.argwhere(free_i).flatten(), np.uint32) if len(ii) > 0: for iii in range(augmenting_row_reductions): ii = augmenting_row_reduction(n, ii, j, i_index, i_count, x, y, u, v, costs) augment(n, ii, j, i_index, i_count, x, y, u, v, costs) if wants_dual_variables: return x, y, u, v else: return x, y
lapjv
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(i, j, costs, wants_dual_variables=False, augmenting_row_reductions=2):\n import os\n i = np.atleast_1d(i).astype(int)\n j = np.atleast_1d(j).astype(int)\n costs = np.atleast_1d(costs)\n assert len(i) == len(j), 'i and j must be the same length'\n assert len(i) == len(costs), 'costs must be the same length as i'\n j_count = np.bincount(j)\n assert not np.any(j_count == 0), 'all j must be paired with at least one i'\n j_index = np.hstack([[0], np.cumsum(j_count[:-1])])\n i_count = np.bincount(i)\n assert not np.any(i_count == 0), 'all i must be paired with at least one j'\n i_index = np.hstack([[0], np.cumsum(i_count[:-1])])\n n = len(j_count)\n assert n == len(i_count), 'There must be the same number of unique i and j'\n x = np.ascontiguousarray(np.ones(n, np.uint32) * n)\n y = np.ascontiguousarray(np.ones(n, np.uint32) * n, np.uint32)\n u = np.ascontiguousarray(np.zeros(n, np.float64))\n order = np.lexsort((-i, costs, j))\n min_idx = order[j_index]\n min_i = i[min_idx]\n v = np.ascontiguousarray(costs[min_idx], np.float64)\n x[min_i] = np.arange(n).astype(np.uint32)\n y[x[x != n]] = np.arange(n).astype(np.uint32)[x != n]\n assignment_count = np.bincount(min_i[min_i != n])\n assignment_count = np.hstack((assignment_count, np.zeros(n - len(\n assignment_count), int)))\n free_i = assignment_count == 0\n one_i = assignment_count == 1\n order = np.lexsort((j, i))\n j = np.ascontiguousarray(j[order], np.uint32)\n costs = np.ascontiguousarray(costs[order], np.float64)\n i_index = np.ascontiguousarray(i_index, np.uint32)\n i_count = np.ascontiguousarray(i_count, np.uint32)\n if np.any(one_i):\n reduction_transfer(np.ascontiguousarray(np.argwhere(one_i).flatten(\n ), np.uint32), j, i_index, i_count, x, u, v, costs)\n ii = np.ascontiguousarray(np.argwhere(free_i).flatten(), np.uint32)\n if len(ii) > 0:\n for iii in range(augmenting_row_reductions):\n ii = augmenting_row_reduction(n, ii, j, i_index, i_count, x, y,\n u, v, costs)\n augment(n, ii, j, i_index, i_count, x, y, u, v, costs)\n if wants_dual_variables:\n return x, y, u, v\n else:\n return x, y\n", "role": "user" }, { "content": "lapjv", "role": "assistant" } ]
def x(ii, j, idx, count, x, u, v, c): for i in ii: j1 = x[i] jj = j[idx[i]:idx[i] + count[i]] uu = np.min((c[idx[i]:idx[i] + count[i]] - v[jj])[jj != j1]) v[j1] = v[j1] - uu + u[i] u[i] = uu
slow_reduction_transfer
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(ii, j, idx, count, x, u, v, c):\n for i in ii:\n j1 = x[i]\n jj = j[idx[i]:idx[i] + count[i]]\n uu = np.min((c[idx[i]:idx[i] + count[i]] - v[jj])[jj != j1])\n v[j1] = v[j1] - uu + u[i]\n u[i] = uu\n", "role": "user" }, { "content": "slow_reduction_transfer", "role": "assistant" } ]
def x(n, ii, jj, idx, count, x, y, u, v, c): ii = list(ii) k = 0 limit = len(ii) free = [] while k < limit: i = ii[k] k += 1 j = jj[idx[i]:idx[i] + count[i]] uu = c[idx[i]:idx[i] + count[i]] - v[j] order = np.lexsort([uu]) u1, u2 = uu[order[:2]] j1, j2 = j[order[:2]] i1 = y[j1] if u1 < u2: v[j1] = v[j1] - u2 + u1 elif i1 != n: j1 = j2 i1 = y[j1] if i1 != n: if u1 < u2: k -= 1 ii[k] = i1 else: free.append(i1) x[i] = j1 y[j1] = i return np.array(free, np.uint32)
slow_augmenting_row_reduction
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(n, ii, jj, idx, count, x, y, u, v, c):\n ii = list(ii)\n k = 0\n limit = len(ii)\n free = []\n while k < limit:\n i = ii[k]\n k += 1\n j = jj[idx[i]:idx[i] + count[i]]\n uu = c[idx[i]:idx[i] + count[i]] - v[j]\n order = np.lexsort([uu])\n u1, u2 = uu[order[:2]]\n j1, j2 = j[order[:2]]\n i1 = y[j1]\n if u1 < u2:\n v[j1] = v[j1] - u2 + u1\n elif i1 != n:\n j1 = j2\n i1 = y[j1]\n if i1 != n:\n if u1 < u2:\n k -= 1\n ii[k] = i1\n else:\n free.append(i1)\n x[i] = j1\n y[j1] = i\n return np.array(free, np.uint32)\n", "role": "user" }, { "content": "slow_augmenting_row_reduction", "role": "assistant" } ]
def x(n, ii, jj, idx, count, x, y, u, v, c): inf = np.sum(c) + 1 d = np.zeros(n) cc = np.zeros((n, n)) cc[:, :] = inf for i in range(n): cc[i, jj[idx[i]:idx[i] + count[i]]] = c[idx[i]:idx[i] + count[i]] c = cc for i in ii: print('Processing i=%d' % i) j = jj[idx[i]:idx[i] + count[i]] d = c[i, :] - v pred = np.ones(n, int) * i on_deck = [] ready = [] scan = [] to_do = list(range(n)) try: while True: print('Evaluating i=%d, n_scan = %d' % (i, len(scan))) if len(scan) == 0: ready += on_deck on_deck = [] umin = np.min([d[jjj] for jjj in to_do]) print('umin = %f' % umin) scan = [jjj for jjj in to_do if d[jjj] == umin] to_do = [jjj for jjj in to_do if d[jjj] != umin] for j1 in scan: if y[j1] == n: raise StopIteration() j1 = scan[0] iii = y[j1] print('Consider replacing i=%d, j=%d' % (iii, j1)) scan = scan[1:] on_deck += [j1] u1 = c[iii, j1] - v[j1] - umin for j1 in list(to_do): h = c[iii, j1] - v[j1] - u1 print( 'Consider j=%d as replacement, c[%d,%d]=%f,v[%d]=%f,h=%f, d[j]= %f' % (j1, iii, j1, c[iii, j1], j1, v[j1], h, d[j1])) if h < d[j1]: print('Add to chain') pred[j1] = iii if h == umin: if y[j1] == n: raise StopIteration() print('Add to scan') scan += [j1] to_do.remove(j1) d[j1] = h except StopIteration: print('Augmenting %d' % j1) for k in ready: temp = v[k] v[k] = v[k] + d[k] - umin print('v[%d] %f -> %f' % (k, temp, v[k])) while True: iii = pred[j1] print('y[%d] %d -> %d' % (j1, y[j1], iii)) y[j1] = iii j1, x[iii] = x[iii], j1 if iii == i: break for i in range(n): j = x[i] u[i] = c[i, j] - v[j]
slow_augment
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(n, ii, jj, idx, count, x, y, u, v, c):\n inf = np.sum(c) + 1\n d = np.zeros(n)\n cc = np.zeros((n, n))\n cc[:, :] = inf\n for i in range(n):\n cc[i, jj[idx[i]:idx[i] + count[i]]] = c[idx[i]:idx[i] + count[i]]\n c = cc\n for i in ii:\n print('Processing i=%d' % i)\n j = jj[idx[i]:idx[i] + count[i]]\n d = c[i, :] - v\n pred = np.ones(n, int) * i\n on_deck = []\n ready = []\n scan = []\n to_do = list(range(n))\n try:\n while True:\n print('Evaluating i=%d, n_scan = %d' % (i, len(scan)))\n if len(scan) == 0:\n ready += on_deck\n on_deck = []\n umin = np.min([d[jjj] for jjj in to_do])\n print('umin = %f' % umin)\n scan = [jjj for jjj in to_do if d[jjj] == umin]\n to_do = [jjj for jjj in to_do if d[jjj] != umin]\n for j1 in scan:\n if y[j1] == n:\n raise StopIteration()\n j1 = scan[0]\n iii = y[j1]\n print('Consider replacing i=%d, j=%d' % (iii, j1))\n scan = scan[1:]\n on_deck += [j1]\n u1 = c[iii, j1] - v[j1] - umin\n for j1 in list(to_do):\n h = c[iii, j1] - v[j1] - u1\n print(\n 'Consider j=%d as replacement, c[%d,%d]=%f,v[%d]=%f,h=%f, d[j]= %f'\n % (j1, iii, j1, c[iii, j1], j1, v[j1], h, d[j1]))\n if h < d[j1]:\n print('Add to chain')\n pred[j1] = iii\n if h == umin:\n if y[j1] == n:\n raise StopIteration()\n print('Add to scan')\n scan += [j1]\n to_do.remove(j1)\n d[j1] = h\n except StopIteration:\n print('Augmenting %d' % j1)\n for k in ready:\n temp = v[k]\n v[k] = v[k] + d[k] - umin\n print('v[%d] %f -> %f' % (k, temp, v[k]))\n while True:\n iii = pred[j1]\n print('y[%d] %d -> %d' % (j1, y[j1], iii))\n y[j1] = iii\n j1, x[iii] = x[iii], j1\n if iii == i:\n break\n for i in range(n):\n j = x[i]\n u[i] = c[i, j] - v[j]\n", "role": "user" }, { "content": "slow_augment", "role": "assistant" } ]
def x(self, max_dist_kb, size_bin_kb, display_graph): logger.info('estimation of the parameters of the model') self.bins = np.arange(size_bin_kb, max_dist_kb + size_bin_kb, size_bin_kb) self.mean_contacts = np.zeros_like(self.bins, dtype=np.float32) self.dict_collect = dict() self.gpu_vect_frags.copy_from_gpu() epsi = self.mean_value_trans for k in self.bins: self.dict_collect[k] = [] for i in range(0, self.n_frags // 10): start = self.sparse_matrix.indptr[i] end = self.sparse_matrix.indptr[i + 1] id_j = self.sparse_matrix.indices[start:end] data = self.sparse_matrix.data[start:end] info_i = self.np_sub_frags_2_frags[i] init_id_fi = int(info_i[0]) id_c_i = self.S_o_A_frags['id_c'][init_id_fi] s_i = self.S_o_A_frags['start_bp'][init_id_fi ] / 1000.0 + self.np_sub_frags_2_frags[i][1] len_kb_c_i = self.S_o_A_frags['l_cont_bp'][init_id_fi] / 1000 if size_bin_kb < len_kb_c_i: local_bins = np.arange(size_bin_kb, max_dist_kb + size_bin_kb, size_bin_kb) local_storage = np.zeros_like(local_bins, dtype=np.int32) for fj, dj in zip(id_j, data): info_j = self.np_sub_frags_2_frags[fj] init_id_fj = int(info_j[0]) id_c_j = self.S_o_A_frags['id_c'][init_id_fj] if id_c_i == id_c_j: s_j = self.S_o_A_frags['start_bp'][init_id_fj ] / 1000.0 + self.np_sub_frags_2_frags[fj][1] d = np.abs(s_i - s_j) if d < max_dist_kb: id_bin = int(d / size_bin_kb) local_storage[id_bin] += dj for bin, val in zip(local_bins, local_storage): self.dict_collect[bin].append(val) for id_bin in range(0, len(self.bins)): k = self.bins[id_bin] self.mean_contacts[id_bin] = np.mean(self.dict_collect[k]) for id_bin in range(0, len(self.bins)): k = self.bins[id_bin] tmp = np.mean(self.dict_collect[k]) if np.isnan(tmp) or tmp == 0: self.mean_contacts[id_bin] = np.nan else: self.mean_contacts[id_bin] = tmp + epsi self.mean_contacts_upd = [] self.bins_upd = [] for count, ele in zip(self.mean_contacts, self.bins): if not np.isnan(count): self.bins_upd.append(ele) self.mean_contacts_upd.append(count) self.bins_upd = np.array(self.bins_upd) self.mean_contacts_upd = np.array(self.mean_contacts_upd) p, self.y_estim = opti.estimate_param_rippe(self.mean_contacts_upd, self.bins_upd) logger.info('p from estimate parameters = {}'.format(p)) fit_param = p logger.info('mean value trans = {}'.format(self.mean_value_trans)) logger.info('BEWARE!!! : I will lower mean value trans !!!') self.mean_value_trans = self.mean_value_trans / 10.0 estim_max_dist = opti.estimate_max_dist_intra(fit_param, self. mean_value_trans) logger.info('estimate max dist cis trans = {}'.format(estim_max_dist)) self.param_simu = self.setup_rippe_parameters(fit_param, estim_max_dist) self.param_simu_test = self.param_simu self.gpu_param_simu = cuda.mem_alloc(self.param_simu.nbytes) self.gpu_param_simu_test = cuda.mem_alloc(self.param_simu.nbytes) cuda.memcpy_htod(self.gpu_param_simu, self.param_simu) cuda.memcpy_htod(self.gpu_param_simu_test, self.param_simu_test) display_graph = False if display_graph: plt.figure() plt.loglog(self.bins_upd, self.mean_contacts_upd, '-*b') plt.loglog(self.bins_upd, self.y_estim, '-*r') plt.xlabel('genomic distance (kb)') plt.ylabel('frequency of contact') plt.title( '$\\mathrm{Frequency\\ of\\ contact\\ versus\\ genomic\\ distance \\(data\\ tricho test):}\\ slope=%.3f,\\ max\\ cis\\ distance(kb)=%.3f\\ d=%.3f\\ scale\\ factor=%.3f\\ $' % (self.param_simu['slope'], estim_max_dist, self.param_simu[ 'd'], self.param_simu['fact'])) plt.legend(['obs', 'fit']) plt.savefig() plt.close() self.eval_likelihood_init()
estimate_parameters_rippe
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, max_dist_kb, size_bin_kb, display_graph):\n logger.info('estimation of the parameters of the model')\n self.bins = np.arange(size_bin_kb, max_dist_kb + size_bin_kb, size_bin_kb)\n self.mean_contacts = np.zeros_like(self.bins, dtype=np.float32)\n self.dict_collect = dict()\n self.gpu_vect_frags.copy_from_gpu()\n epsi = self.mean_value_trans\n for k in self.bins:\n self.dict_collect[k] = []\n for i in range(0, self.n_frags // 10):\n start = self.sparse_matrix.indptr[i]\n end = self.sparse_matrix.indptr[i + 1]\n id_j = self.sparse_matrix.indices[start:end]\n data = self.sparse_matrix.data[start:end]\n info_i = self.np_sub_frags_2_frags[i]\n init_id_fi = int(info_i[0])\n id_c_i = self.S_o_A_frags['id_c'][init_id_fi]\n s_i = self.S_o_A_frags['start_bp'][init_id_fi\n ] / 1000.0 + self.np_sub_frags_2_frags[i][1]\n len_kb_c_i = self.S_o_A_frags['l_cont_bp'][init_id_fi] / 1000\n if size_bin_kb < len_kb_c_i:\n local_bins = np.arange(size_bin_kb, max_dist_kb + size_bin_kb,\n size_bin_kb)\n local_storage = np.zeros_like(local_bins, dtype=np.int32)\n for fj, dj in zip(id_j, data):\n info_j = self.np_sub_frags_2_frags[fj]\n init_id_fj = int(info_j[0])\n id_c_j = self.S_o_A_frags['id_c'][init_id_fj]\n if id_c_i == id_c_j:\n s_j = self.S_o_A_frags['start_bp'][init_id_fj\n ] / 1000.0 + self.np_sub_frags_2_frags[fj][1]\n d = np.abs(s_i - s_j)\n if d < max_dist_kb:\n id_bin = int(d / size_bin_kb)\n local_storage[id_bin] += dj\n for bin, val in zip(local_bins, local_storage):\n self.dict_collect[bin].append(val)\n for id_bin in range(0, len(self.bins)):\n k = self.bins[id_bin]\n self.mean_contacts[id_bin] = np.mean(self.dict_collect[k])\n for id_bin in range(0, len(self.bins)):\n k = self.bins[id_bin]\n tmp = np.mean(self.dict_collect[k])\n if np.isnan(tmp) or tmp == 0:\n self.mean_contacts[id_bin] = np.nan\n else:\n self.mean_contacts[id_bin] = tmp + epsi\n self.mean_contacts_upd = []\n self.bins_upd = []\n for count, ele in zip(self.mean_contacts, self.bins):\n if not np.isnan(count):\n self.bins_upd.append(ele)\n self.mean_contacts_upd.append(count)\n self.bins_upd = np.array(self.bins_upd)\n self.mean_contacts_upd = np.array(self.mean_contacts_upd)\n p, self.y_estim = opti.estimate_param_rippe(self.mean_contacts_upd,\n self.bins_upd)\n logger.info('p from estimate parameters = {}'.format(p))\n fit_param = p\n logger.info('mean value trans = {}'.format(self.mean_value_trans))\n logger.info('BEWARE!!! 
: I will lower mean value trans !!!')\n self.mean_value_trans = self.mean_value_trans / 10.0\n estim_max_dist = opti.estimate_max_dist_intra(fit_param, self.\n mean_value_trans)\n logger.info('estimate max dist cis trans = {}'.format(estim_max_dist))\n self.param_simu = self.setup_rippe_parameters(fit_param, estim_max_dist)\n self.param_simu_test = self.param_simu\n self.gpu_param_simu = cuda.mem_alloc(self.param_simu.nbytes)\n self.gpu_param_simu_test = cuda.mem_alloc(self.param_simu.nbytes)\n cuda.memcpy_htod(self.gpu_param_simu, self.param_simu)\n cuda.memcpy_htod(self.gpu_param_simu_test, self.param_simu_test)\n display_graph = False\n if display_graph:\n plt.figure()\n plt.loglog(self.bins_upd, self.mean_contacts_upd, '-*b')\n plt.loglog(self.bins_upd, self.y_estim, '-*r')\n plt.xlabel('genomic distance (kb)')\n plt.ylabel('frequency of contact')\n plt.title(\n '$\\\\mathrm{Frequency\\\\ of\\\\ contact\\\\ versus\\\\ genomic\\\\ distance \\\\(data\\\\ tricho test):}\\\\ slope=%.3f,\\\\ max\\\\ cis\\\\ distance(kb)=%.3f\\\\ d=%.3f\\\\ scale\\\\ factor=%.3f\\\\ $'\n % (self.param_simu['slope'], estim_max_dist, self.param_simu[\n 'd'], self.param_simu['fact']))\n plt.legend(['obs', 'fit'])\n plt.savefig()\n plt.close()\n self.eval_likelihood_init()\n", "role": "user" }, { "content": "estimate_parameters_rippe", "role": "assistant" } ]
def x(self, max_dist_kb, size_bin_kb, display_graph): logger.info('estimation of the parameters of the model') self.bins = np.arange(size_bin_kb, max_dist_kb + size_bin_kb, size_bin_kb) self.mean_contacts = np.zeros_like(self.bins, dtype=np.float32) self.dict_collect = dict() self.gpu_vect_frags.copy_from_gpu() epsi = self.mean_value_trans for k in self.bins: self.dict_collect[k] = [] for i in range(0, 2000): start = self.sparse_matrix.indptr[i] end = self.sparse_matrix.indptr[i + 1] id_j = self.sparse_matrix.indices[start:end] data = self.sparse_matrix.data[start:end] info_i = self.np_sub_frags_2_frags[i] init_id_fi = info_i[0] id_c_i = self.S_o_A_frags['id_c'][init_id_fi] s_i = self.S_o_A_frags['start_bp'][init_id_fi ] / 1000.0 + self.np_sub_frags_2_frags[i][1] len_kb_c_i = self.S_o_A_frags['l_cont_bp'][init_id_fi] / 1000 local_bins = np.arange(size_bin_kb, min(len_kb_c_i, max_dist_kb) + size_bin_kb, size_bin_kb) local_storage = np.zeros_like(local_bins, dtype=np.int32) for fj, dj in zip(id_j, data): info_j = self.np_sub_frags_2_frags[fj] init_id_fj = info_j[0] id_c_j = self.S_o_A_frags['id_c'][init_id_fj] if id_c_i == id_c_j: s_j = self.S_o_A_frags['start_bp'][init_id_fj ] / 1000.0 + self.np_sub_frags_2_frags[fj][1] d = np.abs(s_i - s_j) if d < max_dist_kb: id_bin = d / size_bin_kb local_storage[id_bin] += dj for my_bin, val in zip(local_bins, local_storage): self.dict_collect[my_bin].append(val) for id_bin in range(0, len(self.bins)): k = self.bins[id_bin] self.mean_contacts[id_bin] = np.mean(self.dict_collect[k]) for id_bin in range(0, len(self.bins)): k = self.bins[id_bin] tmp = np.mean(self.dict_collect[k]) if np.isnan(tmp) or tmp == 0: self.mean_contacts[id_bin] = np.nan else: self.mean_contacts[id_bin] = tmp + epsi self.mean_contacts_upd = [] self.bins_upd = [] for count, ele in zip(self.mean_contacts, self.bins): if not np.isnan(count): self.bins_upd.append(ele) self.mean_contacts_upd.append(count) self.bins_upd = np.array(self.bins_upd) self.mean_contacts_upd = np.array(self.mean_contacts_upd) p, self.y_estim = nuis.estimate_param_hic(self.mean_contacts_upd, self. bins_upd) fit_param = p.x logger.info('mean value trans = {}'.format(self.mean_value_trans)) estim_max_dist = nuis.estimate_max_dist_intra(fit_param, self. mean_value_trans) logger.info('max distance cis/trans = {}'.format(estim_max_dist)) self.param_simu = self.setup_model_parameters(fit_param, estim_max_dist) self.gpu_param_simu = cuda.mem_alloc(self.param_simu.nbytes) self.gpu_param_simu_test = cuda.mem_alloc(self.param_simu.nbytes) cuda.memcpy_htod(self.gpu_param_simu, self.param_simu) if display_graph: plt.loglog(self.bins_upd, self.mean_contacts_upd, '-*b') plt.loglog(self.bins_upd, self.y_estim, '-*r') plt.xlabel('genomic distance (kb)') plt.ylabel('frequency of contact') plt.legend(['obs', 'fit']) plt.show()
estimate_parameters
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, max_dist_kb, size_bin_kb, display_graph):\n logger.info('estimation of the parameters of the model')\n self.bins = np.arange(size_bin_kb, max_dist_kb + size_bin_kb, size_bin_kb)\n self.mean_contacts = np.zeros_like(self.bins, dtype=np.float32)\n self.dict_collect = dict()\n self.gpu_vect_frags.copy_from_gpu()\n epsi = self.mean_value_trans\n for k in self.bins:\n self.dict_collect[k] = []\n for i in range(0, 2000):\n start = self.sparse_matrix.indptr[i]\n end = self.sparse_matrix.indptr[i + 1]\n id_j = self.sparse_matrix.indices[start:end]\n data = self.sparse_matrix.data[start:end]\n info_i = self.np_sub_frags_2_frags[i]\n init_id_fi = info_i[0]\n id_c_i = self.S_o_A_frags['id_c'][init_id_fi]\n s_i = self.S_o_A_frags['start_bp'][init_id_fi\n ] / 1000.0 + self.np_sub_frags_2_frags[i][1]\n len_kb_c_i = self.S_o_A_frags['l_cont_bp'][init_id_fi] / 1000\n local_bins = np.arange(size_bin_kb, min(len_kb_c_i, max_dist_kb) +\n size_bin_kb, size_bin_kb)\n local_storage = np.zeros_like(local_bins, dtype=np.int32)\n for fj, dj in zip(id_j, data):\n info_j = self.np_sub_frags_2_frags[fj]\n init_id_fj = info_j[0]\n id_c_j = self.S_o_A_frags['id_c'][init_id_fj]\n if id_c_i == id_c_j:\n s_j = self.S_o_A_frags['start_bp'][init_id_fj\n ] / 1000.0 + self.np_sub_frags_2_frags[fj][1]\n d = np.abs(s_i - s_j)\n if d < max_dist_kb:\n id_bin = d / size_bin_kb\n local_storage[id_bin] += dj\n for my_bin, val in zip(local_bins, local_storage):\n self.dict_collect[my_bin].append(val)\n for id_bin in range(0, len(self.bins)):\n k = self.bins[id_bin]\n self.mean_contacts[id_bin] = np.mean(self.dict_collect[k])\n for id_bin in range(0, len(self.bins)):\n k = self.bins[id_bin]\n tmp = np.mean(self.dict_collect[k])\n if np.isnan(tmp) or tmp == 0:\n self.mean_contacts[id_bin] = np.nan\n else:\n self.mean_contacts[id_bin] = tmp + epsi\n self.mean_contacts_upd = []\n self.bins_upd = []\n for count, ele in zip(self.mean_contacts, self.bins):\n if not np.isnan(count):\n self.bins_upd.append(ele)\n self.mean_contacts_upd.append(count)\n self.bins_upd = np.array(self.bins_upd)\n self.mean_contacts_upd = np.array(self.mean_contacts_upd)\n p, self.y_estim = nuis.estimate_param_hic(self.mean_contacts_upd, self.\n bins_upd)\n fit_param = p.x\n logger.info('mean value trans = {}'.format(self.mean_value_trans))\n estim_max_dist = nuis.estimate_max_dist_intra(fit_param, self.\n mean_value_trans)\n logger.info('max distance cis/trans = {}'.format(estim_max_dist))\n self.param_simu = self.setup_model_parameters(fit_param, estim_max_dist)\n self.gpu_param_simu = cuda.mem_alloc(self.param_simu.nbytes)\n self.gpu_param_simu_test = cuda.mem_alloc(self.param_simu.nbytes)\n cuda.memcpy_htod(self.gpu_param_simu, self.param_simu)\n if display_graph:\n plt.loglog(self.bins_upd, self.mean_contacts_upd, '-*b')\n plt.loglog(self.bins_upd, self.y_estim, '-*r')\n plt.xlabel('genomic distance (kb)')\n plt.ylabel('frequency of contact')\n plt.legend(['obs', 'fit'])\n plt.show()\n", "role": "user" }, { "content": "estimate_parameters", "role": "assistant" } ]
def x(linkage_records): def x(linkage_record): linkage_group, genetic_distance, scaffold = linkage_record[1 ], linkage_record[2], linkage_record[3] return linkage_group, genetic_distance, scaffold degenerate_records = [] for _, degenerate_group in itertools.groupby(linkage_records, key= degeneracy): group_list = list(degenerate_group) start_record, end_record = group_list[0], group_list[-1] assert (start_record[1], start_record[2], start_record[3]) == ( end_record[1], end_record[2], end_record[3]) start_position = start_record[-1] end_position = end_record[-1] scaffold = start_record[3] linkage_group = start_record[1] record = [linkage_group, start_position, end_position, scaffold] degenerate_records.append(record) return degenerate_records
collapse_degenerate_markers
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(linkage_records):\n\n def x(linkage_record):\n linkage_group, genetic_distance, scaffold = linkage_record[1\n ], linkage_record[2], linkage_record[3]\n return linkage_group, genetic_distance, scaffold\n degenerate_records = []\n for _, degenerate_group in itertools.groupby(linkage_records, key=\n degeneracy):\n group_list = list(degenerate_group)\n start_record, end_record = group_list[0], group_list[-1]\n assert (start_record[1], start_record[2], start_record[3]) == (\n end_record[1], end_record[2], end_record[3])\n start_position = start_record[-1]\n end_position = end_record[-1]\n scaffold = start_record[3]\n linkage_group = start_record[1]\n record = [linkage_group, start_position, end_position, scaffold]\n degenerate_records.append(record)\n return degenerate_records\n", "role": "user" }, { "content": "collapse_degenerate_markers", "role": "assistant" } ]
def x(linkage_records): new_records = dict() for lg_name, linkage_group in itertools.groupby(linkage_records, operator.itemgetter(0)): new_records[lg_name] = [] for record in linkage_group: init_contig = record[-1] start = record[1] end = record[2] new_record = [init_contig, -3, start, end, 1] new_records[lg_name].append(new_record) return new_records
linkage_group_ordering
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(linkage_records):\n new_records = dict()\n for lg_name, linkage_group in itertools.groupby(linkage_records,\n operator.itemgetter(0)):\n new_records[lg_name] = []\n for record in linkage_group:\n init_contig = record[-1]\n start = record[1]\n end = record[2]\n new_record = [init_contig, -3, start, end, 1]\n new_records[lg_name].append(new_record)\n return new_records\n", "role": "user" }, { "content": "linkage_group_ordering", "role": "assistant" } ]
def x(info_frags_records, linkage_orderings): scaffolds = info_frags_records.keys() linkage_groups = linkage_orderings.keys() best_matching_table = dict() for scaffold, linkage_group in itertools.product(scaffolds, linkage_groups ): lg_ordering = [init_contig for init_contig, _ in itertools.groupby( linkage_orderings[linkage_group], operator.itemgetter(0))] scaffold_ordering = [init_contig for init_contig, bin_group in itertools.groupby(info_frags_records[scaffold], operator. itemgetter(0)) if init_contig in lg_ordering] overlap = set(lg_ordering).intersection(set(scaffold_ordering)) missing_locations = dict() for missing_block in sorted(set(lg_ordering) - set(overlap)): for i, init_contig in enumerate(lg_ordering): if init_contig == missing_block: try: block_before = lg_ordering[i - 1] except IndexError: block_before = 'beginning' missing_locations[missing_block] = block_before try: if len(overlap) > best_matching_table[scaffold][0]: best_matching_table[scaffold] = len(overlap ), linkage_group, missing_locations except KeyError: best_matching_table[scaffold] = len(overlap ), linkage_group, missing_locations return best_matching_table
compare_orderings
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(info_frags_records, linkage_orderings):\n scaffolds = info_frags_records.keys()\n linkage_groups = linkage_orderings.keys()\n best_matching_table = dict()\n for scaffold, linkage_group in itertools.product(scaffolds, linkage_groups\n ):\n lg_ordering = [init_contig for init_contig, _ in itertools.groupby(\n linkage_orderings[linkage_group], operator.itemgetter(0))]\n scaffold_ordering = [init_contig for init_contig, bin_group in\n itertools.groupby(info_frags_records[scaffold], operator.\n itemgetter(0)) if init_contig in lg_ordering]\n overlap = set(lg_ordering).intersection(set(scaffold_ordering))\n missing_locations = dict()\n for missing_block in sorted(set(lg_ordering) - set(overlap)):\n for i, init_contig in enumerate(lg_ordering):\n if init_contig == missing_block:\n try:\n block_before = lg_ordering[i - 1]\n except IndexError:\n block_before = 'beginning'\n missing_locations[missing_block] = block_before\n try:\n if len(overlap) > best_matching_table[scaffold][0]:\n best_matching_table[scaffold] = len(overlap\n ), linkage_group, missing_locations\n except KeyError:\n best_matching_table[scaffold] = len(overlap\n ), linkage_group, missing_locations\n return best_matching_table\n", "role": "user" }, { "content": "compare_orderings", "role": "assistant" } ]
def x(info_frags_records, matching_pairs, linkage_orderings): import logging import sys logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) logger = logging.getLogger() logger.setLevel(logging.DEBUG) touched_lgs = set() def x(key_record_tuple): return len(key_record_tuple[1]) new_scaffolds = copy.deepcopy(info_frags_records) for scaffold_name in collections.OrderedDict(sorted(new_scaffolds.items (), key=record_length, reverse=True)): scaffold = new_scaffolds[scaffold_name] new_scaffold = [] corresponding_lg = matching_pairs[scaffold_name][1] if corresponding_lg in touched_lgs: continue else: touched_lgs.add(corresponding_lg) scaffold_block_names = {my_bin[0] for my_bin in scaffold} lg_block_names = [my_block[0] for my_block in linkage_orderings[ corresponding_lg]] touched_bins = set() for lg_block_name in lg_block_names: if lg_block_name in scaffold_block_names: for my_bin in scaffold: if tuple(my_bin) in new_scaffold: continue elif my_bin[0] == lg_block_name or my_bin[0 ] not in lg_block_names: new_scaffold.append(tuple(my_bin)) touched_bins.add(tuple(my_bin)) else: break for other_name, other_scaffold in new_scaffolds.items(): if other_name == scaffold_name: continue i = 0 for my_bin in other_scaffold: if tuple(my_bin) in new_scaffold: i += 1 continue elif my_bin[0] == lg_block_name: moved_bin = tuple(other_scaffold.pop(i)) new_scaffold.append(tuple(moved_bin)) touched_bins.add(tuple(moved_bin)) i -= 1 i += 1 for remaining_bin in scaffold: if tuple(remaining_bin) not in touched_bins: new_scaffold.append(tuple(remaining_bin)) touched_bins.add(tuple(remaining_bin)) if len(new_scaffold) > 0: new_scaffolds[scaffold_name] = new_scaffold return new_scaffolds
get_missing_blocks
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(info_frags_records, matching_pairs, linkage_orderings):\n import logging\n import sys\n logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)\n logger = logging.getLogger()\n logger.setLevel(logging.DEBUG)\n touched_lgs = set()\n\n def x(key_record_tuple):\n return len(key_record_tuple[1])\n new_scaffolds = copy.deepcopy(info_frags_records)\n for scaffold_name in collections.OrderedDict(sorted(new_scaffolds.items\n (), key=record_length, reverse=True)):\n scaffold = new_scaffolds[scaffold_name]\n new_scaffold = []\n corresponding_lg = matching_pairs[scaffold_name][1]\n if corresponding_lg in touched_lgs:\n continue\n else:\n touched_lgs.add(corresponding_lg)\n scaffold_block_names = {my_bin[0] for my_bin in scaffold}\n lg_block_names = [my_block[0] for my_block in linkage_orderings[\n corresponding_lg]]\n touched_bins = set()\n for lg_block_name in lg_block_names:\n if lg_block_name in scaffold_block_names:\n for my_bin in scaffold:\n if tuple(my_bin) in new_scaffold:\n continue\n elif my_bin[0] == lg_block_name or my_bin[0\n ] not in lg_block_names:\n new_scaffold.append(tuple(my_bin))\n touched_bins.add(tuple(my_bin))\n else:\n break\n for other_name, other_scaffold in new_scaffolds.items():\n if other_name == scaffold_name:\n continue\n i = 0\n for my_bin in other_scaffold:\n if tuple(my_bin) in new_scaffold:\n i += 1\n continue\n elif my_bin[0] == lg_block_name:\n moved_bin = tuple(other_scaffold.pop(i))\n new_scaffold.append(tuple(moved_bin))\n touched_bins.add(tuple(moved_bin))\n i -= 1\n i += 1\n for remaining_bin in scaffold:\n if tuple(remaining_bin) not in touched_bins:\n new_scaffold.append(tuple(remaining_bin))\n touched_bins.add(tuple(remaining_bin))\n if len(new_scaffold) > 0:\n new_scaffolds[scaffold_name] = new_scaffold\n return new_scaffolds\n", "role": "user" }, { "content": "get_missing_blocks", "role": "assistant" } ]
def x(info_frags): new_scaffolds = {} with open(info_frags, 'r') as info_frags_handle: current_new_contig = None for line in info_frags_handle: if line.startswith('>'): current_new_contig = str(line[1:-1]) new_scaffolds[current_new_contig] = [] elif line.startswith('init_contig'): pass else: init_contig, id_frag, orientation, pos_start, pos_end = str( line[:-1]).split('\t') start = int(pos_start) end = int(pos_end) ori = int(orientation) fragid = int(id_frag) assert start < end assert ori in {-1, 1} new_scaffolds[current_new_contig].append([init_contig, fragid, start, end, ori]) return new_scaffolds
parse_info_frags
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(info_frags):\n new_scaffolds = {}\n with open(info_frags, 'r') as info_frags_handle:\n current_new_contig = None\n for line in info_frags_handle:\n if line.startswith('>'):\n current_new_contig = str(line[1:-1])\n new_scaffolds[current_new_contig] = []\n elif line.startswith('init_contig'):\n pass\n else:\n init_contig, id_frag, orientation, pos_start, pos_end = str(\n line[:-1]).split('\\t')\n start = int(pos_start)\n end = int(pos_end)\n ori = int(orientation)\n fragid = int(id_frag)\n assert start < end\n assert ori in {-1, 1}\n new_scaffolds[current_new_contig].append([init_contig,\n fragid, start, end, ori])\n return new_scaffolds\n", "role": "user" }, { "content": "parse_info_frags", "role": "assistant" } ]
def x(bed_file): new_scaffolds = {} with open(bed_file) as bed_handle: for line in bed_handle: chrom, start, end, query, qual, strand = line.split()[:7] if strand == '+': ori = 1 elif strand == '-': ori = -1 else: raise ValueError('Error when parsing strand orientation: {}' .format(strand)) if int(qual) > 0: bed_bin = [query, -2, int(start), int(end), ori] try: new_scaffolds[chrom].append(bed_bin) except KeyError: new_scaffolds[chrom] = [bed_bin] return new_scaffolds
parse_bed
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(bed_file):\n new_scaffolds = {}\n with open(bed_file) as bed_handle:\n for line in bed_handle:\n chrom, start, end, query, qual, strand = line.split()[:7]\n if strand == '+':\n ori = 1\n elif strand == '-':\n ori = -1\n else:\n raise ValueError('Error when parsing strand orientation: {}'\n .format(strand))\n if int(qual) > 0:\n bed_bin = [query, -2, int(start), int(end), ori]\n try:\n new_scaffolds[chrom].append(bed_bin)\n except KeyError:\n new_scaffolds[chrom] = [bed_bin]\n return new_scaffolds\n", "role": "user" }, { "content": "parse_bed", "role": "assistant" } ]
def x(scaffolds, corrector): new_scaffolds = {} def x(bin1, bin2): if bin2 is None: return False init1, _, start1, end1, _ = bin1 init2, _, start2, end2, _ = bin2 if init1 != init2: return False else: return start2 <= start1 <= end2 or start1 <= start2 <= end1 def x(bin1, bin2, ignore_ori=True): init1, _, start1, end1, ori1 = bin1 init2, _, start2, end2, ori2 = bin2 assert init1 == init2 start = min(start1, start2) end = max(end1, end2) if ignore_ori: ori = ori1 else: len1 = end1 - start1 len2 = end2 - start2 if len2 > len1: ori = ori2 else: ori = ori1 new_bin = [init1, -3, start, end, ori] return new_bin corrector_bins = copy.deepcopy(corrector) for name, scaffold in scaffolds.items(): new_scaffold = [] for _, blocks in itertools.groupby(scaffold, operator.itemgetter(0)): merged_bin = None while 'Reading blocks': try: my_bin = next(blocks) if are_overlapping(my_bin, merged_bin): merged_bin = merge_bins(my_bin, merged_bin) continue else: if merged_bin is not None: new_scaffold.append(merged_bin) merged_bin = my_bin i = 0 for correct_bin in corrector_bins: if are_overlapping(my_bin, correct_bin): merged_bin = merge_bins(my_bin, correct_bin) corrector_bins.pop(i) i -= 1 i += 1 except StopIteration: if merged_bin is not None: new_scaffold.append(merged_bin) break new_scaffolds[name] = new_scaffold return new_scaffolds
correct_scaffolds
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(scaffolds, corrector):\n new_scaffolds = {}\n\n def x(bin1, bin2):\n if bin2 is None:\n return False\n init1, _, start1, end1, _ = bin1\n init2, _, start2, end2, _ = bin2\n if init1 != init2:\n return False\n else:\n return start2 <= start1 <= end2 or start1 <= start2 <= end1\n\n def x(bin1, bin2, ignore_ori=True):\n init1, _, start1, end1, ori1 = bin1\n init2, _, start2, end2, ori2 = bin2\n assert init1 == init2\n start = min(start1, start2)\n end = max(end1, end2)\n if ignore_ori:\n ori = ori1\n else:\n len1 = end1 - start1\n len2 = end2 - start2\n if len2 > len1:\n ori = ori2\n else:\n ori = ori1\n new_bin = [init1, -3, start, end, ori]\n return new_bin\n corrector_bins = copy.deepcopy(corrector)\n for name, scaffold in scaffolds.items():\n new_scaffold = []\n for _, blocks in itertools.groupby(scaffold, operator.itemgetter(0)):\n merged_bin = None\n while 'Reading blocks':\n try:\n my_bin = next(blocks)\n if are_overlapping(my_bin, merged_bin):\n merged_bin = merge_bins(my_bin, merged_bin)\n continue\n else:\n if merged_bin is not None:\n new_scaffold.append(merged_bin)\n merged_bin = my_bin\n i = 0\n for correct_bin in corrector_bins:\n if are_overlapping(my_bin, correct_bin):\n merged_bin = merge_bins(my_bin, correct_bin)\n corrector_bins.pop(i)\n i -= 1\n i += 1\n except StopIteration:\n if merged_bin is not None:\n new_scaffold.append(merged_bin)\n break\n new_scaffolds[name] = new_scaffold\n return new_scaffolds\n", "role": "user" }, { "content": "correct_scaffolds", "role": "assistant" } ]
def x(info_frags): if isinstance(info_frags, dict): return info_frags else: try: scaffolds = parse_info_frags(info_frags) return scaffolds except OSError: print('Error when opening info_frags.txt') raise
format_info_frags
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(info_frags):\n if isinstance(info_frags, dict):\n return info_frags\n else:\n try:\n scaffolds = parse_info_frags(info_frags)\n return scaffolds\n except OSError:\n print('Error when opening info_frags.txt')\n raise\n", "role": "user" }, { "content": "format_info_frags", "role": "assistant" } ]
def x(scaffolds): scaffolds = format_info_frags(scaffolds) for name, scaffold in scaffolds.items(): plt.figure() xs = range(len(scaffold)) color = [] names = {} ys = [] for my_bin in scaffold: current_color = 'r' if my_bin[4] > 0 else 'g' color += [current_color] name = my_bin[0] if name in names: ys.append(names[name]) else: names[name] = len(names) ys.append(names[name]) plt.scatter(xs, ys, c=color) plt.show()
plot_info_frags
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(scaffolds):\n scaffolds = format_info_frags(scaffolds)\n for name, scaffold in scaffolds.items():\n plt.figure()\n xs = range(len(scaffold))\n color = []\n names = {}\n ys = []\n for my_bin in scaffold:\n current_color = 'r' if my_bin[4] > 0 else 'g'\n color += [current_color]\n name = my_bin[0]\n if name in names:\n ys.append(names[name])\n else:\n names[name] = len(names)\n ys.append(names[name])\n plt.scatter(xs, ys, c=color)\n plt.show()\n", "role": "user" }, { "content": "plot_info_frags", "role": "assistant" } ]
def x(scaffolds): scaffolds = format_info_frags(scaffolds) new_scaffolds = {} for name, scaffold in scaffolds.items(): new_scaffold = [] if len(scaffold) > 2: for i in range(len(scaffold)): if i == 0: if not (scaffold[i][0] != scaffold[i + 1][0] and scaffold[i + 1][0] == scaffold[i + 2][0]): new_scaffold.append(scaffold[i]) elif i == len(scaffold) - 1: if not (scaffold[i][0] != scaffold[i - 1][0] and scaffold[i - 1][0] == scaffold[i - 2][0]): new_scaffold.append(scaffold[i]) elif not (scaffold[i - 1][0] == scaffold[i + 1][0] and scaffold[i - 1][0] != scaffold[i][0]): new_scaffold.append(scaffold[i]) else: new_scaffold = copy.deepcopy(scaffold) new_scaffolds[name] = new_scaffold return new_scaffolds
remove_spurious_insertions
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(scaffolds):\n scaffolds = format_info_frags(scaffolds)\n new_scaffolds = {}\n for name, scaffold in scaffolds.items():\n new_scaffold = []\n if len(scaffold) > 2:\n for i in range(len(scaffold)):\n if i == 0:\n if not (scaffold[i][0] != scaffold[i + 1][0] and \n scaffold[i + 1][0] == scaffold[i + 2][0]):\n new_scaffold.append(scaffold[i])\n elif i == len(scaffold) - 1:\n if not (scaffold[i][0] != scaffold[i - 1][0] and \n scaffold[i - 1][0] == scaffold[i - 2][0]):\n new_scaffold.append(scaffold[i])\n elif not (scaffold[i - 1][0] == scaffold[i + 1][0] and \n scaffold[i - 1][0] != scaffold[i][0]):\n new_scaffold.append(scaffold[i])\n else:\n new_scaffold = copy.deepcopy(scaffold)\n new_scaffolds[name] = new_scaffold\n return new_scaffolds\n", "role": "user" }, { "content": "remove_spurious_insertions", "role": "assistant" } ]
def x(scaffolds, criterion='colinear'): scaffolds = format_info_frags(scaffolds) new_scaffolds = {} def x(bin1, bin2): return bin1[0] == bin2[0] def x(bin1, bin2): return is_cis(bin1, bin2) and bin1[3] == bin2[2] def x(bin1, bin2): return is_cis(bin1, bin2) and bin1[3] <= bin2[2] condition_callables = {'cis': is_cis, 'colinear': is_colinear, 'contiguous': is_contiguous} block_test = condition_callables.get(criterion, 'colinear') for name, scaffold in scaffolds.items(): new_scaffold = [] block_cumulative_ori = 0 if len(scaffold) > 2: current_bin = scaffold[0] block_buffer = [] for my_bin in scaffold: if not block_buffer: new_bin = copy.deepcopy(my_bin) block_buffer.append(new_bin) block_cumulative_ori = my_bin[-1] continue elif not block_test(current_bin, my_bin): for my_buf_bin in block_buffer: new_bin = copy.deepcopy(my_buf_bin) if block_cumulative_ori >= 0: new_bin[-1] = 1 else: new_bin[-1] = -1 new_scaffold.append(new_bin) block_cumulative_ori = my_bin[-1] current_bin = copy.deepcopy(my_bin) block_buffer = copy.deepcopy([my_bin]) else: block_cumulative_ori += my_bin[-1] new_bin = copy.deepcopy(my_bin) block_buffer.append(new_bin) current_bin = my_bin for my_bin in block_buffer: new_bin = copy.deepcopy(my_bin) if block_cumulative_ori >= 0: new_bin[-1] = 1 else: new_bin[-1] = -1 new_scaffold.append(new_bin) new_scaffolds[name] = copy.deepcopy(new_scaffold) else: new_scaffolds[name] = copy.deepcopy(scaffold) return new_scaffolds
correct_spurious_inversions
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(scaffolds, criterion='colinear'):\n scaffolds = format_info_frags(scaffolds)\n new_scaffolds = {}\n\n def x(bin1, bin2):\n return bin1[0] == bin2[0]\n\n def x(bin1, bin2):\n return is_cis(bin1, bin2) and bin1[3] == bin2[2]\n\n def x(bin1, bin2):\n return is_cis(bin1, bin2) and bin1[3] <= bin2[2]\n condition_callables = {'cis': is_cis, 'colinear': is_colinear,\n 'contiguous': is_contiguous}\n block_test = condition_callables.get(criterion, 'colinear')\n for name, scaffold in scaffolds.items():\n new_scaffold = []\n block_cumulative_ori = 0\n if len(scaffold) > 2:\n current_bin = scaffold[0]\n block_buffer = []\n for my_bin in scaffold:\n if not block_buffer:\n new_bin = copy.deepcopy(my_bin)\n block_buffer.append(new_bin)\n block_cumulative_ori = my_bin[-1]\n continue\n elif not block_test(current_bin, my_bin):\n for my_buf_bin in block_buffer:\n new_bin = copy.deepcopy(my_buf_bin)\n if block_cumulative_ori >= 0:\n new_bin[-1] = 1\n else:\n new_bin[-1] = -1\n new_scaffold.append(new_bin)\n block_cumulative_ori = my_bin[-1]\n current_bin = copy.deepcopy(my_bin)\n block_buffer = copy.deepcopy([my_bin])\n else:\n block_cumulative_ori += my_bin[-1]\n new_bin = copy.deepcopy(my_bin)\n block_buffer.append(new_bin)\n current_bin = my_bin\n for my_bin in block_buffer:\n new_bin = copy.deepcopy(my_bin)\n if block_cumulative_ori >= 0:\n new_bin[-1] = 1\n else:\n new_bin[-1] = -1\n new_scaffold.append(new_bin)\n new_scaffolds[name] = copy.deepcopy(new_scaffold)\n else:\n new_scaffolds[name] = copy.deepcopy(scaffold)\n return new_scaffolds\n", "role": "user" }, { "content": "correct_spurious_inversions", "role": "assistant" } ]
def x(scaffolds): scaffolds = format_info_frags(scaffolds) new_scaffolds = {} ordering = dict() for name, scaffold in scaffolds.items(): new_scaffold = [] ordering = dict() order = 0 my_blocks = [] for _, my_block in itertools.groupby(scaffold, operator.itemgetter(0)): my_bins = list(my_block) my_blocks.append(my_bins) block_length = len(my_bins) block_name = my_bins[0][0] if block_name in ordering.keys(): if block_length > ordering[block_name][1]: ordering[block_name] = order, block_length else: ordering[block_name] = order, block_length order += 1 def x(block): return ordering[block[0][0]][0] for my_block in sorted(my_blocks, key=block_order): for my_bin in my_block: new_scaffold.append(my_bin) new_scaffolds[name] = copy.deepcopy(new_scaffold) return new_scaffolds
rearrange_intra_scaffolds
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(scaffolds):\n scaffolds = format_info_frags(scaffolds)\n new_scaffolds = {}\n ordering = dict()\n for name, scaffold in scaffolds.items():\n new_scaffold = []\n ordering = dict()\n order = 0\n my_blocks = []\n for _, my_block in itertools.groupby(scaffold, operator.itemgetter(0)):\n my_bins = list(my_block)\n my_blocks.append(my_bins)\n block_length = len(my_bins)\n block_name = my_bins[0][0]\n if block_name in ordering.keys():\n if block_length > ordering[block_name][1]:\n ordering[block_name] = order, block_length\n else:\n ordering[block_name] = order, block_length\n order += 1\n\n def x(block):\n return ordering[block[0][0]][0]\n for my_block in sorted(my_blocks, key=block_order):\n for my_bin in my_block:\n new_scaffold.append(my_bin)\n new_scaffolds[name] = copy.deepcopy(new_scaffold)\n return new_scaffolds\n", "role": "user" }, { "content": "rearrange_intra_scaffolds", "role": "assistant" } ]
def x(init_fasta, info_frags, output=DEFAULT_NEW_GENOME_NAME, junction=False): init_genome = {record.id: record.seq for record in SeqIO.parse( init_fasta, 'fasta')} my_new_records = [] with open(info_frags, 'r') as info_frags_handle: current_seq = '' current_id = None previous_contig = None for line in info_frags_handle: if line.startswith('>'): previous_contig = None if current_id is not None: new_record = SeqRecord(current_seq, id=current_id, description='') my_new_records.append(new_record) current_seq = '' current_id = str(line[1:]) elif line.startswith('init_contig'): previous_contig = None else: init_contig, _, orientation, pos_start, pos_end = str(line[:-1] ).split('\t') start = int(pos_start) end = int(pos_end) ori = int(orientation) assert start < end assert ori in {-1, 1} seq_to_add = init_genome[init_contig][start:end] if ori == 1: current_seq += seq_to_add elif ori == -1: current_seq += seq_to_add.reverse_complement() if junction and previous_contig not in {init_contig, None}: error_was_raised = False try: extra_seq = Seq(junction, IUPAC.ambiguous_dna) current_seq = extra_seq + current_seq except TypeError: if not error_was_raised: print('Invalid junction sequence') error_was_raised = True previous_contig = init_contig new_record = SeqRecord(current_seq, id=current_id, description='') my_new_records.append(new_record) SeqIO.write(my_new_records, output, 'fasta')
write_fasta
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(init_fasta, info_frags, output=DEFAULT_NEW_GENOME_NAME, junction=False):\n init_genome = {record.id: record.seq for record in SeqIO.parse(\n init_fasta, 'fasta')}\n my_new_records = []\n with open(info_frags, 'r') as info_frags_handle:\n current_seq = ''\n current_id = None\n previous_contig = None\n for line in info_frags_handle:\n if line.startswith('>'):\n previous_contig = None\n if current_id is not None:\n new_record = SeqRecord(current_seq, id=current_id,\n description='')\n my_new_records.append(new_record)\n current_seq = ''\n current_id = str(line[1:])\n elif line.startswith('init_contig'):\n previous_contig = None\n else:\n init_contig, _, orientation, pos_start, pos_end = str(line[:-1]\n ).split('\\t')\n start = int(pos_start)\n end = int(pos_end)\n ori = int(orientation)\n assert start < end\n assert ori in {-1, 1}\n seq_to_add = init_genome[init_contig][start:end]\n if ori == 1:\n current_seq += seq_to_add\n elif ori == -1:\n current_seq += seq_to_add.reverse_complement()\n if junction and previous_contig not in {init_contig, None}:\n error_was_raised = False\n try:\n extra_seq = Seq(junction, IUPAC.ambiguous_dna)\n current_seq = extra_seq + current_seq\n except TypeError:\n if not error_was_raised:\n print('Invalid junction sequence')\n error_was_raised = True\n previous_contig = init_contig\n new_record = SeqRecord(current_seq, id=current_id, description='')\n my_new_records.append(new_record)\n SeqIO.write(my_new_records, output, 'fasta')\n", "role": "user" }, { "content": "write_fasta", "role": "assistant" } ]
def x(bin_list): id_set = set(my_bin[1] for my_bin in bin_list) start_id, end_id = min(id_set), max(id_set) return id_set == set(range(start_id, end_id + 1))
is_block
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(bin_list):\n id_set = set(my_bin[1] for my_bin in bin_list)\n start_id, end_id = min(id_set), max(id_set)\n return id_set == set(range(start_id, end_id + 1))\n", "role": "user" }, { "content": "is_block", "role": "assistant" } ]
def x(xi, bounds): ge = np.empty_like(xi) for i, (v, bound) in enumerate(zip(xi, bounds)): a = bound[0] b = bound[1] if a == None and b == None: ge[i] = 1.0 elif b == None: ge[i] = v / np.sqrt(v ** 2 + 1) elif a == None: ge[i] = -v / np.sqrt(v ** 2 + 1) else: ge[i] = (b - a) * np.cos(v) / 2.0 return ge
internal2external_grad
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(xi, bounds):\n ge = np.empty_like(xi)\n for i, (v, bound) in enumerate(zip(xi, bounds)):\n a = bound[0]\n b = bound[1]\n if a == None and b == None:\n ge[i] = 1.0\n elif b == None:\n ge[i] = v / np.sqrt(v ** 2 + 1)\n elif a == None:\n ge[i] = -v / np.sqrt(v ** 2 + 1)\n else:\n ge[i] = (b - a) * np.cos(v) / 2.0\n return ge\n", "role": "user" }, { "content": "internal2external_grad", "role": "assistant" } ]
def x(xi, bounds): xe = np.empty_like(xi) for i, (v, bound) in enumerate(zip(xi, bounds)): a = bound[0] b = bound[1] if a == None and b == None: xe[i] = v elif b == None: xe[i] = a - 1.0 + np.sqrt(v ** 2.0 + 1.0) elif a == None: xe[i] = b + 1.0 - np.sqrt(v ** 2.0 + 1.0) else: xe[i] = a + (b - a) / 2.0 * (np.sin(v) + 1.0) return xe
internal2external
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(xi, bounds):\n xe = np.empty_like(xi)\n for i, (v, bound) in enumerate(zip(xi, bounds)):\n a = bound[0]\n b = bound[1]\n if a == None and b == None:\n xe[i] = v\n elif b == None:\n xe[i] = a - 1.0 + np.sqrt(v ** 2.0 + 1.0)\n elif a == None:\n xe[i] = b + 1.0 - np.sqrt(v ** 2.0 + 1.0)\n else:\n xe[i] = a + (b - a) / 2.0 * (np.sin(v) + 1.0)\n return xe\n", "role": "user" }, { "content": "internal2external", "role": "assistant" } ]
def x(xe, bounds): xi = np.empty_like(xe) for i, (v, bound) in enumerate(zip(xe, bounds)): a = bound[0] b = bound[1] if a == None and b == None: xi[i] = v elif b == None: xi[i] = np.sqrt((v - a + 1.0) ** 2.0 - 1) elif a == None: xi[i] = np.sqrt((b - v + 1.0) ** 2.0 - 1) else: xi[i] = np.arcsin(2.0 * (v - a) / (b - a) - 1.0) return xi
external2internal
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(xe, bounds):\n xi = np.empty_like(xe)\n for i, (v, bound) in enumerate(zip(xe, bounds)):\n a = bound[0]\n b = bound[1]\n if a == None and b == None:\n xi[i] = v\n elif b == None:\n xi[i] = np.sqrt((v - a + 1.0) ** 2.0 - 1)\n elif a == None:\n xi[i] = np.sqrt((b - v + 1.0) ** 2.0 - 1)\n else:\n xi[i] = np.arcsin(2.0 * (v - a) / (b - a) - 1.0)\n return xi\n", "role": "user" }, { "content": "external2internal", "role": "assistant" } ]
def x(infodic, p): fjac = infodic['fjac'] ipvt = infodic['ipvt'] n = len(p) perm = np.take(np.eye(n), ipvt - 1, 0) r = np.triu(np.transpose(fjac)[:n, :]) R = np.dot(r, perm) try: cov_x = np.linalg.inv(np.dot(np.transpose(R), R)) except LinAlgError: cov_x = None return cov_x
calc_cov_x
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(infodic, p):\n fjac = infodic['fjac']\n ipvt = infodic['ipvt']\n n = len(p)\n perm = np.take(np.eye(n), ipvt - 1, 0)\n r = np.triu(np.transpose(fjac)[:n, :])\n R = np.dot(r, perm)\n try:\n cov_x = np.linalg.inv(np.dot(np.transpose(R), R))\n except LinAlgError:\n cov_x = None\n return cov_x\n", "role": "user" }, { "content": "calc_cov_x", "role": "assistant" } ]
def x(func, x0, bounds, args=(), **kw): if 'full_output' in kw and kw['full_output']: full = True else: full = False i0 = external2internal(x0, bounds) r = leastsq(err, i0, args=(bounds, func, args), **kw) if full: xi, cov_xi, infodic, mesg, ier = r xe = internal2external(xi, bounds) cov_xe = i2e_cov_x(xi, bounds, cov_xi) return xe, cov_xe, infodic, mesg, ier else: xi, ier = r xe = internal2external(xi, bounds) return xe, ier
leastsqbound
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(func, x0, bounds, args=(), **kw):\n if 'full_output' in kw and kw['full_output']:\n full = True\n else:\n full = False\n i0 = external2internal(x0, bounds)\n r = leastsq(err, i0, args=(bounds, func, args), **kw)\n if full:\n xi, cov_xi, infodic, mesg, ier = r\n xe = internal2external(xi, bounds)\n cov_xe = i2e_cov_x(xi, bounds, cov_xi)\n return xe, cov_xe, infodic, mesg, ier\n else:\n xi, ier = r\n xe = internal2external(xi, bounds)\n return xe, ier\n", "role": "user" }, { "content": "leastsqbound", "role": "assistant" } ]
def x(self): if self.set_status: self.github_repo.create_status(state='pending', description= 'Static analysis in progress.', context='inline-plz', sha=self. last_sha)
start_review
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if self.set_status:\n self.github_repo.create_status(state='pending', description=\n 'Static analysis in progress.', context='inline-plz', sha=self.\n last_sha)\n", "role": "user" }, { "content": "start_review", "role": "assistant" } ]
def x(self, success=True, error=False): if self.set_status: if error: self.github_repo.create_status(state='error', description= 'Static analysis error! inline-plz failed to run.', context ='inline-plz', sha=self.last_sha) elif success: self.github_repo.create_status(state='success', description= 'Static analysis complete! No errors found in your PR.', context='inline-plz', sha=self.last_sha) else: self.github_repo.create_status(state='failure', description= 'Static analysis complete! Found errors in your PR.', context='inline-plz', sha=self.last_sha)
finish_review
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, success=True, error=False):\n if self.set_status:\n if error:\n self.github_repo.create_status(state='error', description=\n 'Static analysis error! inline-plz failed to run.', context\n ='inline-plz', sha=self.last_sha)\n elif success:\n self.github_repo.create_status(state='success', description=\n 'Static analysis complete! No errors found in your PR.',\n context='inline-plz', sha=self.last_sha)\n else:\n self.github_repo.create_status(state='failure', description=\n 'Static analysis complete! Found errors in your PR.',\n context='inline-plz', sha=self.last_sha)\n", "role": "user" }, { "content": "finish_review", "role": "assistant" } ]
def x(self): try: latest_remote_sha = self.pr_commits(self.pull_request.refresh(True))[-1 ].sha print('Latest remote sha: {}'.format(latest_remote_sha)) try: print('Ratelimit remaining: {}'.format(self.github. ratelimit_remaining)) except Exception: print('Failed to look up ratelimit remaining') return self.last_sha != latest_remote_sha except IndexError: return False
out_of_date
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n try:\n latest_remote_sha = self.pr_commits(self.pull_request.refresh(True))[-1\n ].sha\n print('Latest remote sha: {}'.format(latest_remote_sha))\n try:\n print('Ratelimit remaining: {}'.format(self.github.\n ratelimit_remaining))\n except Exception:\n print('Failed to look up ratelimit remaining')\n return self.last_sha != latest_remote_sha\n except IndexError:\n return False\n", "role": "user" }, { "content": "out_of_date", "role": "assistant" } ]
def x(self, message): if not message.line_number: message.line_number = 1 for patched_file in self.patch: target = patched_file.target_file.lstrip('b/') if target == message.path: offset = 1 for hunk in patched_file: for position, hunk_line in enumerate(hunk): if hunk_line.target_line_no == message.line_number: if not hunk_line.is_added: return return position + offset offset += len(hunk) + 1
position
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, message):\n if not message.line_number:\n message.line_number = 1\n for patched_file in self.patch:\n target = patched_file.target_file.lstrip('b/')\n if target == message.path:\n offset = 1\n for hunk in patched_file:\n for position, hunk_line in enumerate(hunk):\n if hunk_line.target_line_no == message.line_number:\n if not hunk_line.is_added:\n return\n return position + offset\n offset += len(hunk) + 1\n", "role": "user" }, { "content": "position", "role": "assistant" } ]
def x(self): OpenGL.GL.glFlush() OpenGL.GL.glClear(OpenGL.GL.GL_COLOR_BUFFER_BIT | OpenGL.GL. GL_DEPTH_BUFFER_BIT) if self.white == -1: OpenGL.GL.glClearColor(1, 1, 1, 1) else: OpenGL.GL.glClearColor(0, 0, 0, 0) OpenGL.GL.glMatrixMode(OpenGL.GL.GL_MODELVIEW) OpenGL.GL.glLoadIdentity() OpenGL.GL.glTranslatef(self.initrans.x, self.initrans.y, self.initrans.z) OpenGL.GL.glRotatef(self.rotate.x, 1, 0, 0) OpenGL.GL.glRotatef(self.rotate.y, 0, 1, 0) OpenGL.GL.glTranslatef(self.translate.x, self.translate.y, self.translate.z ) self.render_image() self.render() glutil.draw_axes() OpenGL.GL.glMatrixMode(OpenGL.GL.GL_PROJECTION) OpenGL.GL.glPushMatrix() OpenGL.GL.glLoadIdentity() OpenGL.GL.glOrtho(0.0, self.width, self.height, 0.0, -1.0, 10.0) OpenGL.GL.glMatrixMode(OpenGL.GL.GL_MODELVIEW) OpenGL.GL.glLoadIdentity() OpenGL.GL.glDisable(OpenGL.GL.GL_CULL_FACE) OpenGL.GL.glClear(OpenGL.GL.GL_DEPTH_BUFFER_BIT) if self.white == 1: self.glut_print(10, 15, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self. str_curr_cycle, 0.0, 1.0, 0.0, 1.0) self.glut_print(10, 30, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self. str_curr_temp, 0.0, 1.0, 0.0, 1.0) self.glut_print(10, 45, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self. str_curr_id_frag, 0.0, 1.0, 0.0, 1.0) self.glut_print(10, 60, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self. str_likelihood, 0.0, 1.0, 0.0, 1.0) self.glut_print(10, 75, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self. str_n_contigs, 0.0, 1.0, 0.0, 1.0) self.glut_print(10, 90, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self. str_curr_dist, 0.0, 1.0, 0.0, 1.0) else: self.glut_print(10, 15, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self. str_curr_cycle, 0.0, 0.0, 0.0, 1.0) self.glut_print(10, 30, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self. str_curr_temp, 0.0, 0.0, 0.0, 1.0) self.glut_print(10, 45, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self. str_curr_id_frag, 0.0, 0.0, 0.0, 1.0) self.glut_print(10, 60, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self. str_likelihood, 0.0, 0.0, 0.0, 1.0) self.glut_print(10, 75, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self. str_n_contigs, 0.0, 0.0, 0.0, 1.0) self.glut_print(10, 90, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self. str_curr_dist, 0.0, 0.0, 0.0, 1.0) OpenGL.GL.glMatrixMode(OpenGL.GL.GL_PROJECTION) OpenGL.GL.glPopMatrix() OpenGL.GL.glMatrixMode(OpenGL.GL.GL_MODELVIEW) OpenGL.GLUT.glutSwapBuffers()
draw
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n OpenGL.GL.glFlush()\n OpenGL.GL.glClear(OpenGL.GL.GL_COLOR_BUFFER_BIT | OpenGL.GL.\n GL_DEPTH_BUFFER_BIT)\n if self.white == -1:\n OpenGL.GL.glClearColor(1, 1, 1, 1)\n else:\n OpenGL.GL.glClearColor(0, 0, 0, 0)\n OpenGL.GL.glMatrixMode(OpenGL.GL.GL_MODELVIEW)\n OpenGL.GL.glLoadIdentity()\n OpenGL.GL.glTranslatef(self.initrans.x, self.initrans.y, self.initrans.z)\n OpenGL.GL.glRotatef(self.rotate.x, 1, 0, 0)\n OpenGL.GL.glRotatef(self.rotate.y, 0, 1, 0)\n OpenGL.GL.glTranslatef(self.translate.x, self.translate.y, self.translate.z\n )\n self.render_image()\n self.render()\n glutil.draw_axes()\n OpenGL.GL.glMatrixMode(OpenGL.GL.GL_PROJECTION)\n OpenGL.GL.glPushMatrix()\n OpenGL.GL.glLoadIdentity()\n OpenGL.GL.glOrtho(0.0, self.width, self.height, 0.0, -1.0, 10.0)\n OpenGL.GL.glMatrixMode(OpenGL.GL.GL_MODELVIEW)\n OpenGL.GL.glLoadIdentity()\n OpenGL.GL.glDisable(OpenGL.GL.GL_CULL_FACE)\n OpenGL.GL.glClear(OpenGL.GL.GL_DEPTH_BUFFER_BIT)\n if self.white == 1:\n self.glut_print(10, 15, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self.\n str_curr_cycle, 0.0, 1.0, 0.0, 1.0)\n self.glut_print(10, 30, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self.\n str_curr_temp, 0.0, 1.0, 0.0, 1.0)\n self.glut_print(10, 45, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self.\n str_curr_id_frag, 0.0, 1.0, 0.0, 1.0)\n self.glut_print(10, 60, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self.\n str_likelihood, 0.0, 1.0, 0.0, 1.0)\n self.glut_print(10, 75, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self.\n str_n_contigs, 0.0, 1.0, 0.0, 1.0)\n self.glut_print(10, 90, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self.\n str_curr_dist, 0.0, 1.0, 0.0, 1.0)\n else:\n self.glut_print(10, 15, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self.\n str_curr_cycle, 0.0, 0.0, 0.0, 1.0)\n self.glut_print(10, 30, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self.\n str_curr_temp, 0.0, 0.0, 0.0, 1.0)\n self.glut_print(10, 45, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self.\n str_curr_id_frag, 0.0, 0.0, 0.0, 1.0)\n self.glut_print(10, 60, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self.\n str_likelihood, 0.0, 0.0, 0.0, 1.0)\n self.glut_print(10, 75, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self.\n str_n_contigs, 0.0, 0.0, 0.0, 1.0)\n self.glut_print(10, 90, OpenGL.GLUT.GLUT_BITMAP_9_BY_15, self.\n str_curr_dist, 0.0, 0.0, 0.0, 1.0)\n OpenGL.GL.glMatrixMode(OpenGL.GL.GL_PROJECTION)\n OpenGL.GL.glPopMatrix()\n OpenGL.GL.glMatrixMode(OpenGL.GL.GL_MODELVIEW)\n OpenGL.GLUT.glutSwapBuffers()\n", "role": "user" }, { "content": "draw", "role": "assistant" } ]
def x(base_folder, size_pyramid, factor, min_bin_per_contig): fact_sub_sampling = factor contig_info = os.path.join(base_folder, 'info_contigs.txt') fragments_list = os.path.join(base_folder, 'fragments_list.txt') init_abs_fragments_contacts = os.path.join(base_folder, 'abs_fragments_contacts_weighted.txt') all_pyramid_folder = os.path.join(base_folder, 'pyramids') pyramid_folder = os.path.join(all_pyramid_folder, 'pyramid_' + str( size_pyramid) + '_no_thresh') if not os.path.exists(all_pyramid_folder): os.mkdir(all_pyramid_folder) if not os.path.exists(pyramid_folder): os.mkdir(pyramid_folder) hdf5_pyramid_file = os.path.join(pyramid_folder, 'pyramid.hdf5') pyramid_handle = h5py.File(hdf5_pyramid_file) level = 0 pyramid_level_folder = os.path.join(pyramid_folder, 'level_' + str(level)) if not os.path.exists(pyramid_level_folder): os.mkdir(pyramid_level_folder) current_contig_info = os.path.join(pyramid_level_folder, str(level) + '_contig_info.txt') current_frag_list = os.path.join(pyramid_level_folder, str(level) + '_fragments_list.txt') current_abs_fragments_contacts = os.path.join(pyramid_level_folder, str (level) + '_abs_frag_contacts.txt') for level in range(0, size_pyramid): pyramid_level_folder = os.path.join(pyramid_folder, 'level_' + str( level)) if not os.path.exists(pyramid_level_folder): os.mkdir(pyramid_level_folder) level_pyramid = str(level) + '_' if level == 0: shutil.copyfile(contig_info, current_contig_info) shutil.copyfile(init_abs_fragments_contacts, current_abs_fragments_contacts) nfrags = init_frag_list(fragments_list, current_frag_list) new_abs_fragments_contacts_file = current_abs_fragments_contacts new_contig_list_file = current_contig_info new_fragments_list_file = current_frag_list sub_2_super_frag_index_file = os.path.join(pyramid_level_folder, level_pyramid + 'sub_2_super_index_frag.txt') else: new_contig_list_file = os.path.join(pyramid_level_folder, level_pyramid + 'contig_info.txt') new_fragments_list_file = os.path.join(pyramid_level_folder, level_pyramid + 'fragments_list.txt') new_abs_fragments_contacts_file = os.path.join(pyramid_level_folder , level_pyramid + 'abs_frag_contacts.txt') if os.path.exists(new_contig_list_file) and os.path.exists( new_fragments_list_file) and os.path.exists( new_abs_fragments_contacts_file) and os.path.exists( sub_2_super_frag_index_file): logger.info('level already built...') nfrags = file_len(new_fragments_list_file) - 1 else: logger.info('writing new_files..') nfrags = subsample_data_set(current_contig_info, current_frag_list, fact_sub_sampling, current_abs_fragments_contacts, new_abs_fragments_contacts_file, min_bin_per_contig, new_contig_list_file, new_fragments_list_file, sub_2_super_frag_index_file) try: status = pyramid_handle.attrs[str(level)] == 'done' except KeyError: pyramid_handle.attrs[str(level)] = 'pending' status = False if not status: logger.info('Start filling the pyramid') fill_sparse_pyramid_level(pyramid_handle, level, new_abs_fragments_contacts_file, nfrags) pyramid_handle.attrs[str(level)] = 'done' current_frag_list = new_fragments_list_file current_contig_info = new_contig_list_file current_abs_fragments_contacts = new_abs_fragments_contacts_file sub_2_super_frag_index_file = os.path.join(pyramid_level_folder, level_pyramid + 'sub_2_super_index_frag.txt') logger.info('pyramid built.') pyramid_handle.close()
build
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(base_folder, size_pyramid, factor, min_bin_per_contig):\n fact_sub_sampling = factor\n contig_info = os.path.join(base_folder, 'info_contigs.txt')\n fragments_list = os.path.join(base_folder, 'fragments_list.txt')\n init_abs_fragments_contacts = os.path.join(base_folder,\n 'abs_fragments_contacts_weighted.txt')\n all_pyramid_folder = os.path.join(base_folder, 'pyramids')\n pyramid_folder = os.path.join(all_pyramid_folder, 'pyramid_' + str(\n size_pyramid) + '_no_thresh')\n if not os.path.exists(all_pyramid_folder):\n os.mkdir(all_pyramid_folder)\n if not os.path.exists(pyramid_folder):\n os.mkdir(pyramid_folder)\n hdf5_pyramid_file = os.path.join(pyramid_folder, 'pyramid.hdf5')\n pyramid_handle = h5py.File(hdf5_pyramid_file)\n level = 0\n pyramid_level_folder = os.path.join(pyramid_folder, 'level_' + str(level))\n if not os.path.exists(pyramid_level_folder):\n os.mkdir(pyramid_level_folder)\n current_contig_info = os.path.join(pyramid_level_folder, str(level) +\n '_contig_info.txt')\n current_frag_list = os.path.join(pyramid_level_folder, str(level) +\n '_fragments_list.txt')\n current_abs_fragments_contacts = os.path.join(pyramid_level_folder, str\n (level) + '_abs_frag_contacts.txt')\n for level in range(0, size_pyramid):\n pyramid_level_folder = os.path.join(pyramid_folder, 'level_' + str(\n level))\n if not os.path.exists(pyramid_level_folder):\n os.mkdir(pyramid_level_folder)\n level_pyramid = str(level) + '_'\n if level == 0:\n shutil.copyfile(contig_info, current_contig_info)\n shutil.copyfile(init_abs_fragments_contacts,\n current_abs_fragments_contacts)\n nfrags = init_frag_list(fragments_list, current_frag_list)\n new_abs_fragments_contacts_file = current_abs_fragments_contacts\n new_contig_list_file = current_contig_info\n new_fragments_list_file = current_frag_list\n sub_2_super_frag_index_file = os.path.join(pyramid_level_folder,\n level_pyramid + 'sub_2_super_index_frag.txt')\n else:\n new_contig_list_file = os.path.join(pyramid_level_folder, \n level_pyramid + 'contig_info.txt')\n new_fragments_list_file = os.path.join(pyramid_level_folder, \n level_pyramid + 'fragments_list.txt')\n new_abs_fragments_contacts_file = os.path.join(pyramid_level_folder\n , level_pyramid + 'abs_frag_contacts.txt')\n if os.path.exists(new_contig_list_file) and os.path.exists(\n new_fragments_list_file) and os.path.exists(\n new_abs_fragments_contacts_file) and os.path.exists(\n sub_2_super_frag_index_file):\n logger.info('level already built...')\n nfrags = file_len(new_fragments_list_file) - 1\n else:\n logger.info('writing new_files..')\n nfrags = subsample_data_set(current_contig_info,\n current_frag_list, fact_sub_sampling,\n current_abs_fragments_contacts,\n new_abs_fragments_contacts_file, min_bin_per_contig,\n new_contig_list_file, new_fragments_list_file,\n sub_2_super_frag_index_file)\n try:\n status = pyramid_handle.attrs[str(level)] == 'done'\n except KeyError:\n pyramid_handle.attrs[str(level)] = 'pending'\n status = False\n if not status:\n logger.info('Start filling the pyramid')\n fill_sparse_pyramid_level(pyramid_handle, level,\n new_abs_fragments_contacts_file, nfrags)\n pyramid_handle.attrs[str(level)] = 'done'\n current_frag_list = new_fragments_list_file\n current_contig_info = new_contig_list_file\n current_abs_fragments_contacts = new_abs_fragments_contacts_file\n sub_2_super_frag_index_file = os.path.join(pyramid_level_folder, \n 
level_pyramid + 'sub_2_super_index_frag.txt')\n logger.info('pyramid built.')\n pyramid_handle.close()\n", "role": "user" }, { "content": "build", "role": "assistant" } ]
def x(abs_contact_file, coo_file): sparse_dict = dict() h = open(abs_contact_file, 'r') all_lines = h.readlines() n_lines = len(all_lines) for i in range(1, n_lines): line = all_lines[i] dat = line.split() mates = [int(dat[0]), int(dat[1])] mates.sort() f1 = mates[0] - 1 f2 = mates[1] - 1 if f1 in sparse_dict: if f2 in sparse_dict[f1]: sparse_dict[f1][f2] += 1 else: sparse_dict[f1][f2] = 1 else: sparse_dict[f1] = dict() sparse_dict[f1][f2] = 1 keys = list(sparse_dict.keys()) keys.sort() h.close() h_coo = open(coo_file, 'w') h_coo.write('%s\t%s\t%s\n' % ('id_frag_a', 'id_frag_b', 'n_contact')) for fa in keys: d_fb = sparse_dict[fa] keys_b = list(d_fb.keys()) keys_b.sort() for fb in keys_b: nc = d_fb[fb] h_coo.write('%s\t%s\t%s\n' % (str(fa), str(fb), str(nc))) h_coo.close() h.close()
abs_contact_2_coo_file
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(abs_contact_file, coo_file):\n sparse_dict = dict()\n h = open(abs_contact_file, 'r')\n all_lines = h.readlines()\n n_lines = len(all_lines)\n for i in range(1, n_lines):\n line = all_lines[i]\n dat = line.split()\n mates = [int(dat[0]), int(dat[1])]\n mates.sort()\n f1 = mates[0] - 1\n f2 = mates[1] - 1\n if f1 in sparse_dict:\n if f2 in sparse_dict[f1]:\n sparse_dict[f1][f2] += 1\n else:\n sparse_dict[f1][f2] = 1\n else:\n sparse_dict[f1] = dict()\n sparse_dict[f1][f2] = 1\n keys = list(sparse_dict.keys())\n keys.sort()\n h.close()\n h_coo = open(coo_file, 'w')\n h_coo.write('%s\\t%s\\t%s\\n' % ('id_frag_a', 'id_frag_b', 'n_contact'))\n for fa in keys:\n d_fb = sparse_dict[fa]\n keys_b = list(d_fb.keys())\n keys_b.sort()\n for fb in keys_b:\n nc = d_fb[fb]\n h_coo.write('%s\\t%s\\t%s\\n' % (str(fa), str(fb), str(nc)))\n h_coo.close()\n h.close()\n", "role": "user" }, { "content": "abs_contact_2_coo_file", "role": "assistant" } ]
def x(pyramid_handle, level, contact_file, nfrags): sparse_dict = dict() h = open(contact_file, 'r') all_lines = h.readlines() n_lines = len(all_lines) for i in range(1, n_lines): line = all_lines[i] dat = line.split() mates = [int(dat[0]), int(dat[1])] nc = int(dat[2]) mates.sort() f1 = mates[0] f2 = mates[1] if f1 in sparse_dict: if f2 in sparse_dict[f1]: sparse_dict[f1][f2] += nc else: sparse_dict[f1][f2] = nc else: sparse_dict[f1] = dict() sparse_dict[f1][f2] = nc keys = list(sparse_dict.keys()) keys.sort() out_r = [] out_c = [] out_d = [] for r in keys: data = sparse_dict[r] for c in list(data.keys()): out_r.append(r) out_c.append(c) out_d.append(data[c]) n_on_pxls = len(out_d) level_hdf5 = pyramid_handle.create_group(str(level)) data_2_sparse = level_hdf5.create_dataset('data', (3, n_on_pxls), 'i') data_nfrags = level_hdf5.create_dataset('nfrags', (1, 1), 'i') np_csr = np.zeros((3, n_on_pxls), dtype=np.int32) np_csr[0, :] = out_r np_csr[1, :] = out_c np_csr[2, :] = out_d data_2_sparse[0, :] = out_r data_2_sparse[1, :] = out_c data_2_sparse[2, :] = out_d data_nfrags[:] = nfrags
fill_sparse_pyramid_level
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(pyramid_handle, level, contact_file, nfrags):\n sparse_dict = dict()\n h = open(contact_file, 'r')\n all_lines = h.readlines()\n n_lines = len(all_lines)\n for i in range(1, n_lines):\n line = all_lines[i]\n dat = line.split()\n mates = [int(dat[0]), int(dat[1])]\n nc = int(dat[2])\n mates.sort()\n f1 = mates[0]\n f2 = mates[1]\n if f1 in sparse_dict:\n if f2 in sparse_dict[f1]:\n sparse_dict[f1][f2] += nc\n else:\n sparse_dict[f1][f2] = nc\n else:\n sparse_dict[f1] = dict()\n sparse_dict[f1][f2] = nc\n keys = list(sparse_dict.keys())\n keys.sort()\n out_r = []\n out_c = []\n out_d = []\n for r in keys:\n data = sparse_dict[r]\n for c in list(data.keys()):\n out_r.append(r)\n out_c.append(c)\n out_d.append(data[c])\n n_on_pxls = len(out_d)\n level_hdf5 = pyramid_handle.create_group(str(level))\n data_2_sparse = level_hdf5.create_dataset('data', (3, n_on_pxls), 'i')\n data_nfrags = level_hdf5.create_dataset('nfrags', (1, 1), 'i')\n np_csr = np.zeros((3, n_on_pxls), dtype=np.int32)\n np_csr[0, :] = out_r\n np_csr[1, :] = out_c\n np_csr[2, :] = out_d\n data_2_sparse[0, :] = out_r\n data_2_sparse[1, :] = out_c\n data_2_sparse[2, :] = out_d\n data_nfrags[:] = nfrags\n", "role": "user" }, { "content": "fill_sparse_pyramid_level", "role": "assistant" } ]
def x(fragment_list, new_frag_list): handle_frag_list = open(fragment_list, 'r') handle_new_frag_list = open(new_frag_list, 'w') handle_new_frag_list.write('%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' % ( 'id', 'chrom', 'start_pos', 'end_pos', 'size', 'gc_content', 'accu_frag', 'frag_start', 'frag_end')) handle_frag_list.readline() i = 0 while 1: line_frag = handle_frag_list.readline() if not line_frag: handle_frag_list.close() handle_new_frag_list.close() break i += 1 data = line_frag.split('\t') id_init = data[0] contig_name = data[1] start_pos = data[2] end_pos = data[3] length_kb = data[4] gc_content = str(float(data[5])) accu_frag = str(1) frag_start = id_init frag_end = id_init handle_new_frag_list.write('%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' % (id_init, contig_name, start_pos, end_pos, length_kb, gc_content, accu_frag, frag_start, frag_end)) return i
init_frag_list
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(fragment_list, new_frag_list):\n handle_frag_list = open(fragment_list, 'r')\n handle_new_frag_list = open(new_frag_list, 'w')\n handle_new_frag_list.write('%s\\t%s\\t%s\\t%s\\t%s\\t%s\\t%s\\t%s\\t%s\\n' % (\n 'id', 'chrom', 'start_pos', 'end_pos', 'size', 'gc_content',\n 'accu_frag', 'frag_start', 'frag_end'))\n handle_frag_list.readline()\n i = 0\n while 1:\n line_frag = handle_frag_list.readline()\n if not line_frag:\n handle_frag_list.close()\n handle_new_frag_list.close()\n break\n i += 1\n data = line_frag.split('\\t')\n id_init = data[0]\n contig_name = data[1]\n start_pos = data[2]\n end_pos = data[3]\n length_kb = data[4]\n gc_content = str(float(data[5]))\n accu_frag = str(1)\n frag_start = id_init\n frag_end = id_init\n handle_new_frag_list.write('%s\\t%s\\t%s\\t%s\\t%s\\t%s\\t%s\\t%s\\t%s\\n' %\n (id_init, contig_name, start_pos, end_pos, length_kb,\n gc_content, accu_frag, frag_start, frag_end))\n return i\n", "role": "user" }, { "content": "init_frag_list", "role": "assistant" } ]
def x(self, curr_frag): level = curr_frag[1] frag = curr_frag[0] output = [] if level > 0: str_level = str(level) sub_low = self.spec_level[str_level]['fragments_dict'][frag][ 'sub_low_index'] sub_high = self.spec_level[str_level]['fragments_dict'][frag][ 'sub_high_index'] new_level = level - 1 for i in range(sub_low, sub_high + 1): output.append((i, new_level)) else: output.append(curr_frag) return output
zoom_in_frag
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, curr_frag):\n level = curr_frag[1]\n frag = curr_frag[0]\n output = []\n if level > 0:\n str_level = str(level)\n sub_low = self.spec_level[str_level]['fragments_dict'][frag][\n 'sub_low_index']\n sub_high = self.spec_level[str_level]['fragments_dict'][frag][\n 'sub_high_index']\n new_level = level - 1\n for i in range(sub_low, sub_high + 1):\n output.append((i, new_level))\n else:\n output.append(curr_frag)\n return output\n", "role": "user" }, { "content": "zoom_in_frag", "role": "assistant" } ]
def x(self, curr_frag): level = curr_frag[1] frag = curr_frag[0] output = [] if level > 0: str_level = str(level) high_frag = self.spec_level[str_level]['fragments_dict'][frag][ 'super_index'] new_level = level + 1 output = high_frag, new_level else: output = curr_frag return output
zoom_out_frag
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, curr_frag):\n level = curr_frag[1]\n frag = curr_frag[0]\n output = []\n if level > 0:\n str_level = str(level)\n high_frag = self.spec_level[str_level]['fragments_dict'][frag][\n 'super_index']\n new_level = level + 1\n output = high_frag, new_level\n else:\n output = curr_frag\n return output\n", "role": "user" }, { "content": "zoom_out_frag", "role": "assistant" } ]
def x(self, curr_pixel): low_frag = curr_pixel[0] high_frag = curr_pixel[1] level = curr_pixel[2] if level > 0: str_level = str(level) low_sub_low = self.spec_level[str_level]['fragments_dict'][low_frag][ 'sub_low_index'] low_sub_high = self.spec_level[str_level]['fragments_dict'][low_frag][ 'sub_high_index'] high_sub_low = self.spec_level[str_level]['fragments_dict'][high_frag][ 'sub_low_index'] high_sub_high = self.spec_level[str_level]['fragments_dict'][high_frag ]['sub_high_index'] vect = [low_sub_low, low_sub_high, high_sub_low, high_sub_high] new_pix_low = min(vect) new_pix_high = max(vect) new_level = level - 1 new_pixel = [new_pix_low, new_pix_high, new_level] else: new_pixel = curr_pixel return new_pixel
zoom_in_pixel
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, curr_pixel):\n low_frag = curr_pixel[0]\n high_frag = curr_pixel[1]\n level = curr_pixel[2]\n if level > 0:\n str_level = str(level)\n low_sub_low = self.spec_level[str_level]['fragments_dict'][low_frag][\n 'sub_low_index']\n low_sub_high = self.spec_level[str_level]['fragments_dict'][low_frag][\n 'sub_high_index']\n high_sub_low = self.spec_level[str_level]['fragments_dict'][high_frag][\n 'sub_low_index']\n high_sub_high = self.spec_level[str_level]['fragments_dict'][high_frag\n ]['sub_high_index']\n vect = [low_sub_low, low_sub_high, high_sub_low, high_sub_high]\n new_pix_low = min(vect)\n new_pix_high = max(vect)\n new_level = level - 1\n new_pixel = [new_pix_low, new_pix_high, new_level]\n else:\n new_pixel = curr_pixel\n return new_pixel\n", "role": "user" }, { "content": "zoom_in_pixel", "role": "assistant" } ]
def x(self, curr_pixel): low_frag = curr_pixel[0] high_frag = curr_pixel[1] level = curr_pixel[2] str_level = str(level) if level < self.n_level - 1: low_super = self.spec_level[str_level]['fragments_dict'][low_frag][ 'super_index'] high_super = self.spec_level[str_level]['fragments_dict'][high_frag][ 'sub_index'] new_pix_low = min([low_super, high_super]) new_pix_high = max([low_super, high_super]) new_level = level + 1 new_pixel = [new_pix_low, new_pix_high, new_level] else: new_pixel = curr_pixel return new_pixel
zoom_out_pixel
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, curr_pixel):\n low_frag = curr_pixel[0]\n high_frag = curr_pixel[1]\n level = curr_pixel[2]\n str_level = str(level)\n if level < self.n_level - 1:\n low_super = self.spec_level[str_level]['fragments_dict'][low_frag][\n 'super_index']\n high_super = self.spec_level[str_level]['fragments_dict'][high_frag][\n 'sub_index']\n new_pix_low = min([low_super, high_super])\n new_pix_high = max([low_super, high_super])\n new_level = level + 1\n new_pixel = [new_pix_low, new_pix_high, new_level]\n else:\n new_pixel = curr_pixel\n return new_pixel\n", "role": "user" }, { "content": "zoom_out_pixel", "role": "assistant" } ]
def x(self, area): x = area[0] y = area[1] level = x[2] logger.debug('x = {}'.format(x)) logger.debug('y = {}'.format(y)) logger.debug('level = {}'.format(level)) if level == y[2] and level > 0: new_level = level - 1 high_x = self.zoom_in_pixel(x) high_y = self.zoom_in_pixel(y) new_x = [min([high_x[0], high_y[0]]), min([high_x[1], high_y[1]]), new_level] new_y = [max([high_x[0], high_y[0]]), max([high_x[1], high_y[1]]), new_level] new_area = [new_x, new_y] else: new_area = area return new_area
zoom_in_area
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, area):\n x = area[0]\n y = area[1]\n level = x[2]\n logger.debug('x = {}'.format(x))\n logger.debug('y = {}'.format(y))\n logger.debug('level = {}'.format(level))\n if level == y[2] and level > 0:\n new_level = level - 1\n high_x = self.zoom_in_pixel(x)\n high_y = self.zoom_in_pixel(y)\n new_x = [min([high_x[0], high_y[0]]), min([high_x[1], high_y[1]]),\n new_level]\n new_y = [max([high_x[0], high_y[0]]), max([high_x[1], high_y[1]]),\n new_level]\n new_area = [new_x, new_y]\n else:\n new_area = area\n return new_area\n", "role": "user" }, { "content": "zoom_in_area", "role": "assistant" } ]
def x(args, config_path='.inlineplz.yml'): config = {} try: with open(config_path) as configfile: config = yaml.safe_load(configfile) or {} if config: print('Loaded config from {}'.format(config_path)) pprint.pprint(config) except (IOError, OSError, yaml.parser.ParserError): traceback.print_exc() args = update_from_config(args, config) args.ignore_paths = args.__dict__.get('ignore_paths') or ['node_modules', '.git', '.tox', 'godeps', 'vendor', 'site-packages', 'venv', '.env', 'spec', 'migrate', 'bin', 'fixtures', 'cassettes', '.cache', '.idea', '.pytest_cache', '__pycache__', 'dist'] if config_path != '.inlineplz.yml': return args if args.config_dir and not config: new_config_path = os.path.join(args.config_dir, config_path) if os.path.exists(new_config_path): return load_config(args, new_config_path) return args
load_config
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(args, config_path='.inlineplz.yml'):\n config = {}\n try:\n with open(config_path) as configfile:\n config = yaml.safe_load(configfile) or {}\n if config:\n print('Loaded config from {}'.format(config_path))\n pprint.pprint(config)\n except (IOError, OSError, yaml.parser.ParserError):\n traceback.print_exc()\n args = update_from_config(args, config)\n args.ignore_paths = args.__dict__.get('ignore_paths') or ['node_modules',\n '.git', '.tox', 'godeps', 'vendor', 'site-packages', 'venv', '.env',\n 'spec', 'migrate', 'bin', 'fixtures', 'cassettes', '.cache',\n '.idea', '.pytest_cache', '__pycache__', 'dist']\n if config_path != '.inlineplz.yml':\n return args\n if args.config_dir and not config:\n new_config_path = os.path.join(args.config_dir, config_path)\n if os.path.exists(new_config_path):\n return load_config(args, new_config_path)\n return args\n", "role": "user" }, { "content": "load_config", "role": "assistant" } ]
def x(args): trusted = args.trusted args = load_config(args) print('Args:') pprint.pprint(args) ret_code = 0 url = args.url if args.repo_slug: owner = args.repo_slug.split('/')[0] repo = args.repo_slug.split('/')[1] else: owner = args.owner repo = args.repo if args.url: try: url_to_parse = args.url if not url_to_parse.endswith('.git'): url_to_parse += '.git' parsed = giturlparse.parse(str(url_to_parse)) url = parsed.resource if not url.startswith('https://'): url = 'https://' + url if parsed.owner: owner = parsed.owner if parsed.name: repo = parsed.name except giturlparse.parser.ParserError: pass if not args.dryrun and args.interface not in interfaces.INTERFACES: print('Valid inline-plz config not found') return 1 print('Using interface: {0}'.format(args.interface)) my_interface = None filenames = None if not args.dryrun: my_interface = interfaces.INTERFACES[args.interface](owner, repo, args.pull_request, args.branch, args.token, url, args.commit, args.ignore_paths, args.prefix, args.autofix, args.set_status) if not my_interface.is_valid(): print('Invalid review. Exiting.') return 0 filenames = my_interface.filenames my_interface.start_review() try: linter_runner = LinterRunner(args.install, args.autorun, args. ignore_paths, args.config_dir, args.enabled_linters, args. disabled_linters, args.autofix, trusted, filenames) messages = linter_runner.run_linters() except Exception: print('Linting failed:\n{}'.format(traceback.format_exc())) print('inline-plz version: {}'.format(__version__)) print('Python version: {}'.format(sys.version)) ret_code = 1 if my_interface: my_interface.finish_review(error=True) return ret_code print('{} lint messages found'.format(len(messages))) print('inline-plz version: {}'.format(__version__)) print('Python version: {}'.format(sys.version)) if args.dryrun: print_messages(messages) write_messages_to_json(messages) return ret_code try: if my_interface.post_messages(messages, args.max_comments): if not args.zero_exit: ret_code = 1 if args.delete_outdated: my_interface.clear_outdated_messages() my_interface.finish_review(success=False) write_messages_to_json(messages) return ret_code if args.delete_outdated: my_interface.clear_outdated_messages() my_interface.finish_review(success=True) except KeyError: print('Interface not found: {}'.format(args.interface)) traceback.print_exc() write_messages_to_json(messages) return ret_code
inline
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(args):\n trusted = args.trusted\n args = load_config(args)\n print('Args:')\n pprint.pprint(args)\n ret_code = 0\n url = args.url\n if args.repo_slug:\n owner = args.repo_slug.split('/')[0]\n repo = args.repo_slug.split('/')[1]\n else:\n owner = args.owner\n repo = args.repo\n if args.url:\n try:\n url_to_parse = args.url\n if not url_to_parse.endswith('.git'):\n url_to_parse += '.git'\n parsed = giturlparse.parse(str(url_to_parse))\n url = parsed.resource\n if not url.startswith('https://'):\n url = 'https://' + url\n if parsed.owner:\n owner = parsed.owner\n if parsed.name:\n repo = parsed.name\n except giturlparse.parser.ParserError:\n pass\n if not args.dryrun and args.interface not in interfaces.INTERFACES:\n print('Valid inline-plz config not found')\n return 1\n print('Using interface: {0}'.format(args.interface))\n my_interface = None\n filenames = None\n if not args.dryrun:\n my_interface = interfaces.INTERFACES[args.interface](owner, repo,\n args.pull_request, args.branch, args.token, url, args.commit,\n args.ignore_paths, args.prefix, args.autofix, args.set_status)\n if not my_interface.is_valid():\n print('Invalid review. Exiting.')\n return 0\n filenames = my_interface.filenames\n my_interface.start_review()\n try:\n linter_runner = LinterRunner(args.install, args.autorun, args.\n ignore_paths, args.config_dir, args.enabled_linters, args.\n disabled_linters, args.autofix, trusted, filenames)\n messages = linter_runner.run_linters()\n except Exception:\n print('Linting failed:\\n{}'.format(traceback.format_exc()))\n print('inline-plz version: {}'.format(__version__))\n print('Python version: {}'.format(sys.version))\n ret_code = 1\n if my_interface:\n my_interface.finish_review(error=True)\n return ret_code\n print('{} lint messages found'.format(len(messages)))\n print('inline-plz version: {}'.format(__version__))\n print('Python version: {}'.format(sys.version))\n if args.dryrun:\n print_messages(messages)\n write_messages_to_json(messages)\n return ret_code\n try:\n if my_interface.post_messages(messages, args.max_comments):\n if not args.zero_exit:\n ret_code = 1\n if args.delete_outdated:\n my_interface.clear_outdated_messages()\n my_interface.finish_review(success=False)\n write_messages_to_json(messages)\n return ret_code\n if args.delete_outdated:\n my_interface.clear_outdated_messages()\n my_interface.finish_review(success=True)\n except KeyError:\n print('Interface not found: {}'.format(args.interface))\n traceback.print_exc()\n write_messages_to_json(messages)\n return ret_code\n", "role": "user" }, { "content": "inline", "role": "assistant" } ]
def x(): for install_dir in linters.INSTALL_DIRS: try: shutil.rmtree(install_dir, ignore_errors=True) except Exception: print('{0}\nFailed to delete {1}'.format(traceback.format_exc(), install_dir)) sys.stdout.flush()
cleanup
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n for install_dir in linters.INSTALL_DIRS:\n try:\n shutil.rmtree(install_dir, ignore_errors=True)\n except Exception:\n print('{0}\\nFailed to delete {1}'.format(traceback.format_exc(),\n install_dir))\n sys.stdout.flush()\n", "role": "user" }, { "content": "cleanup", "role": "assistant" } ]
async def apply_json(self, data): """Apply a JSON data object for a service """ if not isinstance(data, list): data = [data] result = [] for entry in data: if not isinstance(entry, dict): raise KongError('dictionary required') ensure = entry.pop('ensure', None) name = entry.pop('name', None) routes = entry.pop('routes', []) plugins = entry.pop('plugins', []) host = entry.pop('host', None) if host in LOCAL_HOST: host = local_ip() if not name: raise KongError('Service name is required') if ensure in REMOVE: if await self.has(name): await self.delete(name) continue config = entry.pop('config', None) if isinstance(config, dict): entry.update(config) if await self.has(name): srv = await self.update(name, host=host, **entry) else: srv = await self.create(name=name, host=host, **entry) srv.data['routes'] = await srv.routes.apply_json(routes) srv.data['plugins'] = await srv.plugins.apply_json(plugins) result.append(srv.data) return result
apply_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def apply_json(self, data):\n \"\"\"Apply a JSON data object for a service\n \"\"\"\n if not isinstance(data, list):\n data = [data]\n result = []\n for entry in data:\n if not isinstance(entry, dict):\n raise KongError('dictionary required')\n ensure = entry.pop('ensure', None)\n name = entry.pop('name', None)\n routes = entry.pop('routes', [])\n plugins = entry.pop('plugins', [])\n host = entry.pop('host', None)\n if host in LOCAL_HOST:\n host = local_ip()\n if not name:\n raise KongError('Service name is required')\n if ensure in REMOVE:\n if await self.has(name):\n await self.delete(name)\n continue\n config = entry.pop('config', None)\n if isinstance(config, dict):\n entry.update(config)\n if await self.has(name):\n srv = await self.update(name, host=host, **entry)\n else:\n srv = await self.create(name=name, host=host, **entry)\n srv.data['routes'] = await srv.routes.apply_json(routes)\n srv.data['plugins'] = await srv.plugins.apply_json(plugins)\n result.append(srv.data)\n return result\n", "role": "user" }, { "content": "apply_json", "role": "assistant" } ]
async def apply_json(self, data): """Apply a JSON data object for a service """ if not isinstance(data, list): data = [data] result = [] for entry in data: name = entry.pop('name') if await self.has(name): sni = await self.update(name, **entry) else: sni = await self.create(name=name, **entry) result.append(sni.data) return result
apply_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def apply_json(self, data):\n \"\"\"Apply a JSON data object for a service\n \"\"\"\n if not isinstance(data, list):\n data = [data]\n result = []\n for entry in data:\n name = entry.pop('name')\n if await self.has(name):\n sni = await self.update(name, **entry)\n else:\n sni = await self.create(name=name, **entry)\n result.append(sni.data)\n return result\n", "role": "user" }, { "content": "apply_json", "role": "assistant" } ]
def x(self, size): self.instance.resize_volume(size) self.size = size
resize
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, size):\n self.instance.resize_volume(size)\n self.size = size\n", "role": "user" }, { "content": "resize", "role": "assistant" } ]
def x(self, item): resource = super(CloudDatabaseManager, self).get(item) resource.volume = CloudDatabaseVolume(resource, resource.volume) return resource
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, item):\n resource = super(CloudDatabaseManager, self).get(item)\n resource.volume = CloudDatabaseVolume(resource, resource.volume)\n return resource\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(self, name, flavor=None, volume=None, databases=None, users=None, version=None, type=None): if flavor is None: flavor = 1 flavor_ref = self.api._get_flavor_ref(flavor) if volume is None: volume = 1 if databases is None: databases = [] if users is None: users = [] body = {'instance': {'name': name, 'flavorRef': flavor_ref, 'volume': { 'size': volume}, 'databases': databases, 'users': users}} if type is not None or version is not None: required = type, version if all(required): body['instance']['datastore'] = {'type': type, 'version': version} else: raise exc.MissingCloudDatabaseParameter( 'Specifying a datastore requires both the datastore type as well as the version.' ) return body
create_body
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name, flavor=None, volume=None, databases=None, users=None,\n version=None, type=None):\n if flavor is None:\n flavor = 1\n flavor_ref = self.api._get_flavor_ref(flavor)\n if volume is None:\n volume = 1\n if databases is None:\n databases = []\n if users is None:\n users = []\n body = {'instance': {'name': name, 'flavorRef': flavor_ref, 'volume': {\n 'size': volume}, 'databases': databases, 'users': users}}\n if type is not None or version is not None:\n required = type, version\n if all(required):\n body['instance']['datastore'] = {'type': type, 'version': version}\n else:\n raise exc.MissingCloudDatabaseParameter(\n 'Specifying a datastore requires both the datastore type as well as the version.'\n )\n return body\n", "role": "user" }, { "content": "create_body", "role": "assistant" } ]
def x(self, instance, name, description=None): body = {'backup': {'instance': utils.get_id(instance), 'name': name}} if description is not None: body['backup']['description'] = description uri = '/backups' resp, resp_body = self.api.method_post(uri, body=body) mgr = self.api._backup_manager return CloudDatabaseBackup(mgr, body.get('backup'))
create_backup
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, instance, name, description=None):\n body = {'backup': {'instance': utils.get_id(instance), 'name': name}}\n if description is not None:\n body['backup']['description'] = description\n uri = '/backups'\n resp, resp_body = self.api.method_post(uri, body=body)\n mgr = self.api._backup_manager\n return CloudDatabaseBackup(mgr, body.get('backup'))\n", "role": "user" }, { "content": "create_backup", "role": "assistant" } ]
def x(self, backup, name, flavor, volume): flavor_ref = self.api._get_flavor_ref(flavor) body = {'instance': {'name': name, 'flavorRef': flavor_ref, 'volume': { 'size': volume}, 'restorePoint': {'backupRef': utils.get_id(backup)}}} uri = '/%s' % self.uri_base resp, resp_body = self.api.method_post(uri, body=body) return CloudDatabaseInstance(self, resp_body.get('instance', {}))
restore_backup
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, backup, name, flavor, volume):\n flavor_ref = self.api._get_flavor_ref(flavor)\n body = {'instance': {'name': name, 'flavorRef': flavor_ref, 'volume': {\n 'size': volume}, 'restorePoint': {'backupRef': utils.get_id(backup)}}}\n uri = '/%s' % self.uri_base\n resp, resp_body = self.api.method_post(uri, body=body)\n return CloudDatabaseInstance(self, resp_body.get('instance', {}))\n", "role": "user" }, { "content": "restore_backup", "role": "assistant" } ]
def x(self, instance=None, marker=0, limit=20): return self.api._backup_manager.list(instance=instance, limit=limit, marker=marker)
list_backups
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, instance=None, marker=0, limit=20):\n return self.api._backup_manager.list(instance=instance, limit=limit,\n marker=marker)\n", "role": "user" }, { "content": "list_backups", "role": "assistant" } ]
def x(self, instance, marker=0, limit=20): uri = '/%s/%s/backups?limit=%d&marker=%d' % (self.uri_base, utils. get_id(instance), int(limit), int(marker)) resp, resp_body = self.api.method_get(uri) mgr = self.api._backup_manager return [CloudDatabaseBackup(mgr, backup) for backup in resp_body.get( 'backups')]
list_backups_for_instance
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, instance, marker=0, limit=20):\n uri = '/%s/%s/backups?limit=%d&marker=%d' % (self.uri_base, utils.\n get_id(instance), int(limit), int(marker))\n resp, resp_body = self.api.method_get(uri)\n mgr = self.api._backup_manager\n return [CloudDatabaseBackup(mgr, backup) for backup in resp_body.get(\n 'backups')]\n", "role": "user" }, { "content": "list_backups_for_instance", "role": "assistant" } ]
def x(self, dbs, strict=True): dbs = utils.coerce_to_list(dbs) db_names = [utils.get_name(db) for db in dbs] if strict: good_dbs = self.instance.list_databases() good_names = [utils.get_name(good_db) for good_db in good_dbs] bad_names = [db_name for db_name in db_names if db_name not in good_names] if bad_names: bad = ', '.join(bad_names) raise exc.NoSuchDatabase( 'The following database(s) were not found: %s' % bad) return db_names
get_db_names
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, dbs, strict=True):\n dbs = utils.coerce_to_list(dbs)\n db_names = [utils.get_name(db) for db in dbs]\n if strict:\n good_dbs = self.instance.list_databases()\n good_names = [utils.get_name(good_db) for good_db in good_dbs]\n bad_names = [db_name for db_name in db_names if db_name not in\n good_names]\n if bad_names:\n bad = ', '.join(bad_names)\n raise exc.NoSuchDatabase(\n 'The following database(s) were not found: %s' % bad)\n return db_names\n", "role": "user" }, { "content": "get_db_names", "role": "assistant" } ]
def x(self, user, name=None, password=None, host=None): if not any((name, password, host)): raise exc.MissingDBUserParameters( 'You must supply at least one of the following: new username, new password, or new host specification.' ) if not isinstance(user, CloudDatabaseUser): user = self.get(user) dct = {} if name and name != user.name: dct['name'] = name if host and host != user.host: dct['host'] = host if password: dct['password'] = password if not dct: raise exc.DBUpdateUnchanged( 'You must supply at least one changed value when updating a user.') uri = '/%s/%s' % (self.uri_base, user.name) body = {'user': dct} resp, resp_body = self.api.method_put(uri, body=body) return None
update
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, user, name=None, password=None, host=None):\n if not any((name, password, host)):\n raise exc.MissingDBUserParameters(\n 'You must supply at least one of the following: new username, new password, or new host specification.'\n )\n if not isinstance(user, CloudDatabaseUser):\n user = self.get(user)\n dct = {}\n if name and name != user.name:\n dct['name'] = name\n if host and host != user.host:\n dct['host'] = host\n if password:\n dct['password'] = password\n if not dct:\n raise exc.DBUpdateUnchanged(\n 'You must supply at least one changed value when updating a user.')\n uri = '/%s/%s' % (self.uri_base, user.name)\n body = {'user': dct}\n resp, resp_body = self.api.method_put(uri, body=body)\n return None\n", "role": "user" }, { "content": "update", "role": "assistant" } ]
def x(self, user): user = utils.get_name(user) uri = '/%s/%s/databases' % (self.uri_base, user) try: resp, resp_body = self.api.method_get(uri) except exc.NotFound as e: raise exc.NoSuchDatabaseUser("User '%s' does not exist." % user) dbs = resp_body.get('databases', {}) return [CloudDatabaseDatabase(self, db) for db in dbs]
list_user_access
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, user):\n user = utils.get_name(user)\n uri = '/%s/%s/databases' % (self.uri_base, user)\n try:\n resp, resp_body = self.api.method_get(uri)\n except exc.NotFound as e:\n raise exc.NoSuchDatabaseUser(\"User '%s' does not exist.\" % user)\n dbs = resp_body.get('databases', {})\n return [CloudDatabaseDatabase(self, db) for db in dbs]\n", "role": "user" }, { "content": "list_user_access", "role": "assistant" } ]
def x(self, user, db_names, strict=True): user = utils.get_name(user) uri = '/%s/%s/databases' % (self.uri_base, user) db_names = self._get_db_names(db_names, strict=strict) dbs = [{'name': db_name} for db_name in db_names] body = {'databases': dbs} try: resp, resp_body = self.api.method_put(uri, body=body) except exc.NotFound as e: raise exc.NoSuchDatabaseUser("User '%s' does not exist." % user)
grant_user_access
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, user, db_names, strict=True):\n user = utils.get_name(user)\n uri = '/%s/%s/databases' % (self.uri_base, user)\n db_names = self._get_db_names(db_names, strict=strict)\n dbs = [{'name': db_name} for db_name in db_names]\n body = {'databases': dbs}\n try:\n resp, resp_body = self.api.method_put(uri, body=body)\n except exc.NotFound as e:\n raise exc.NoSuchDatabaseUser(\"User '%s' does not exist.\" % user)\n", "role": "user" }, { "content": "grant_user_access", "role": "assistant" } ]
def x(self, user, db_names, strict=True): user = utils.get_name(user) db_names = self._get_db_names(db_names, strict=strict) bad_names = [] for db_name in db_names: uri = '/%s/%s/databases/%s' % (self.uri_base, user, db_name) resp, resp_body = self.api.method_delete(uri)
revoke_user_access
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, user, db_names, strict=True):\n user = utils.get_name(user)\n db_names = self._get_db_names(db_names, strict=strict)\n bad_names = []\n for db_name in db_names:\n uri = '/%s/%s/databases/%s' % (self.uri_base, user, db_name)\n resp, resp_body = self.api.method_delete(uri)\n", "role": "user" }, { "content": "revoke_user_access", "role": "assistant" } ]
def x(self, instance=None, limit=20, marker=0): if instance is None: return super(CloudDatabaseBackupManager, self).list() return self.api._manager._list_backups_for_instance(instance, limit= limit, marker=marker)
list
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, instance=None, limit=20, marker=0):\n if instance is None:\n return super(CloudDatabaseBackupManager, self).list()\n return self.api._manager._list_backups_for_instance(instance, limit=\n limit, marker=marker)\n", "role": "user" }, { "content": "list", "role": "assistant" } ]
def x(self): super(CloudDatabaseInstance, self).get() self.volume = CloudDatabaseVolume(self, self.volume)
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n super(CloudDatabaseInstance, self).get()\n self.volume = CloudDatabaseVolume(self, self.volume)\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(self, limit=None, marker=None): return self._database_manager.list(limit=limit, marker=marker)
list_databases
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, limit=None, marker=None):\n return self._database_manager.list(limit=limit, marker=marker)\n", "role": "user" }, { "content": "list_databases", "role": "assistant" } ]
def x(self, limit=None, marker=None): return self._user_manager.list(limit=limit, marker=marker)
list_users
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, limit=None, marker=None):\n return self._user_manager.list(limit=limit, marker=marker)\n", "role": "user" }, { "content": "list_users", "role": "assistant" } ]
def x(self, name): try: return self._user_manager.get(name) except exc.NotFound: raise exc.NoSuchDatabaseUser("No user by the name '%s' exists." % name)
get_user
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name):\n try:\n return self._user_manager.get(name)\n except exc.NotFound:\n raise exc.NoSuchDatabaseUser(\"No user by the name '%s' exists.\" % name)\n", "role": "user" }, { "content": "get_user", "role": "assistant" } ]
def x(self, name): try: return [db for db in self.list_databases() if db.name == name][0] except IndexError: raise exc.NoSuchDatabase("No database by the name '%s' exists." % name)
get_database
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name):\n try:\n return [db for db in self.list_databases() if db.name == name][0]\n except IndexError:\n raise exc.NoSuchDatabase(\"No database by the name '%s' exists.\" % name)\n", "role": "user" }, { "content": "get_database", "role": "assistant" } ]
def x(self, name, character_set=None, collate=None): if character_set is None: character_set = 'utf8' if collate is None: collate = 'utf8_general_ci' self._database_manager.create(name=name, character_set=character_set, collate=collate, return_none=True) return self._database_manager.find(name=name)
create_database
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name, character_set=None, collate=None):\n if character_set is None:\n character_set = 'utf8'\n if collate is None:\n collate = 'utf8_general_ci'\n self._database_manager.create(name=name, character_set=character_set,\n collate=collate, return_none=True)\n return self._database_manager.find(name=name)\n", "role": "user" }, { "content": "create_database", "role": "assistant" } ]
def x(self, name, password, database_names, host=None): if not isinstance(database_names, (list, tuple)): database_names = [database_names] database_names = [(db if isinstance(db, six.string_types) else db.name) for db in database_names] self._user_manager.create(name=name, password=password, database_names= database_names, host=host, return_none=True) return self._user_manager.find(name=name)
create_user
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name, password, database_names, host=None):\n if not isinstance(database_names, (list, tuple)):\n database_names = [database_names]\n database_names = [(db if isinstance(db, six.string_types) else db.name) for\n db in database_names]\n self._user_manager.create(name=name, password=password, database_names=\n database_names, host=host, return_none=True)\n return self._user_manager.find(name=name)\n", "role": "user" }, { "content": "create_user", "role": "assistant" } ]